From f816e1cc2c61f9c300c1d8c50d4ff996c125f249 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Sun, 2 Jan 2022 20:48:35 +0600 Subject: [PATCH 0001/1051] update docs (#7196) https://github.com/celery/celery/issues/7182 --- docs/userguide/tasks.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/userguide/tasks.rst b/docs/userguide/tasks.rst index 49c4dd68337..aa788fac5c0 100644 --- a/docs/userguide/tasks.rst +++ b/docs/userguide/tasks.rst @@ -345,7 +345,7 @@ The request defines the following attributes: :callbacks: A list of signatures to be called if this task returns successfully. -:errback: A list of signatures to be called if this task fails. +:errbacks: A list of signatures to be called if this task fails. :utc: Set to true the caller has UTC enabled (:setting:`enable_utc`). From 7f9daab9d3007d2a52f7813cf12dd7fa5666c98b Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 3 Jan 2022 16:42:33 +0000 Subject: [PATCH 0002/1051] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/asottile/pyupgrade: v2.29.1 → v2.31.0](https://github.com/asottile/pyupgrade/compare/v2.29.1...v2.31.0) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 43bde9e08b5..31f86c6d9c3 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/asottile/pyupgrade - rev: v2.29.1 + rev: v2.31.0 hooks: - id: pyupgrade args: ["--py37-plus"] From 0620eb2e7ab5537fd8b98ac103750e6fd80cd5b4 Mon Sep 17 00:00:00 2001 From: Omer Katz Date: Wed, 5 Jan 2022 18:19:41 +0200 Subject: [PATCH 0003/1051] Remove Python 3.4 compatibility code. --- celery/utils/saferepr.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/celery/utils/saferepr.py b/celery/utils/saferepr.py index adcfc72efca..245d8ef5cfa 100644 --- a/celery/utils/saferepr.py +++ b/celery/utils/saferepr.py @@ -136,14 +136,7 @@ def _repr_binary_bytes(val): return val.decode('utf-8') except UnicodeDecodeError: # possibly not unicode, but binary data so format as hex. - try: - ashex = val.hex - except AttributeError: # pragma: no cover - # Python 3.4 - return val.decode('utf-8', errors='replace') - else: - # Python 3.5+ - return ashex() + return val.hex() def _format_chars(val, maxlen): From 9377e94927d0699de7b8eaa7838589051c2ea87a Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Tue, 11 Jan 2022 09:35:53 +0600 Subject: [PATCH 0004/1051] update docs to fix #7203 (#7209) --- docs/userguide/tasks.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/userguide/tasks.rst b/docs/userguide/tasks.rst index aa788fac5c0..92ec69a4717 100644 --- a/docs/userguide/tasks.rst +++ b/docs/userguide/tasks.rst @@ -1641,7 +1641,7 @@ setting. .. versionadded::4.2 Results can be enabled/disabled on a per-execution basis, by passing the ``ignore_result`` boolean parameter, -when calling ``apply_async`` or ``delay``. +when calling ``apply_async``. .. 
code-block:: python From bc13e2fdc7fd0a82eaa7e0b89869e4d4ef5051bb Mon Sep 17 00:00:00 2001 From: uuip Date: Wed, 12 Jan 2022 19:01:09 +0800 Subject: [PATCH 0005/1051] call ping to set connection for avoiding error when subscribed_to is empty , call ping to set connection attr for avoiding redis parse_response error --- celery/backends/redis.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/celery/backends/redis.py b/celery/backends/redis.py index 7eedc4c089b..252ecfb58d2 100644 --- a/celery/backends/redis.py +++ b/celery/backends/redis.py @@ -112,6 +112,8 @@ def _reconnect_pubsub(self): ) if self.subscribed_to: self._pubsub.subscribe(*self.subscribed_to) + else: + self._pubsub.ping() @contextmanager def reconnect_on_error(self): From 9387c528202d8cce26a9875f48f15e573d58d84b Mon Sep 17 00:00:00 2001 From: Keith Gray Date: Mon, 10 Jan 2022 13:44:23 -0600 Subject: [PATCH 0006/1051] Documentation updates related to task names --- docs/userguide/periodic-tasks.rst | 4 ++++ docs/userguide/tasks.rst | 6 ++++++ 2 files changed, 10 insertions(+) diff --git a/docs/userguide/periodic-tasks.rst b/docs/userguide/periodic-tasks.rst index 718f4c8af90..089135273bd 100644 --- a/docs/userguide/periodic-tasks.rst +++ b/docs/userguide/periodic-tasks.rst @@ -170,6 +170,10 @@ Available Fields The name of the task to execute. + Task names are described in the :ref:`task-names` section of the User Guide. + Note that this is not the import path of the task, even though the default + naming pattern is built like it is. + * `schedule` The frequency of execution. diff --git a/docs/userguide/tasks.rst b/docs/userguide/tasks.rst index 92ec69a4717..cb1dd310630 100644 --- a/docs/userguide/tasks.rst +++ b/docs/userguide/tasks.rst @@ -237,6 +237,12 @@ named :file:`tasks.py`: >>> add.name 'tasks.add' +.. note:: + + You can use the `inspect` command in a worker to view the names of + all registered tasks. See the `inspect registered` command in the + :ref:`monitoring-control` section of the User Guide. + .. _task-name-generator-info: Changing the automatic naming behavior From 95015a1d5a60d94d8e1e02da4b9cf16416c747e2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michal=20=C4=8Ciha=C5=99?= Date: Tue, 11 Jan 2022 13:36:11 +0100 Subject: [PATCH 0007/1051] Use importlib instead of deprecated pkg_resources This avoids runtime dependency on setuptools. 
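As a rough sketch of the API shift (illustrative only, the real changes are in
the diff below; on the Python versions targeted here ``entry_points()`` returns
a mapping of group names to lists of entry points):

    # Before: pkg_resources, which needs setuptools installed at runtime.
    from pkg_resources import iter_entry_points
    commands = list(iter_entry_points('celery.commands'))

    # After: importlib.metadata from the standard library (Python 3.8+),
    # falling back to the importlib-metadata backport on older interpreters.
    try:
        from importlib.metadata import entry_points
    except ImportError:
        from importlib_metadata import entry_points
    commands = list(entry_points().get('celery.commands', []))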
--- celery/app/backends.py | 3 +-- celery/beat.py | 3 +-- celery/bin/celery.py | 8 ++++++-- celery/utils/imports.py | 12 ++++++------ docs/userguide/extending.rst | 2 +- requirements/default.txt | 2 +- 6 files changed, 16 insertions(+), 14 deletions(-) diff --git a/celery/app/backends.py b/celery/app/backends.py index 8f0390bf2b7..ab40ccaed9f 100644 --- a/celery/app/backends.py +++ b/celery/app/backends.py @@ -44,8 +44,7 @@ def by_name(backend=None, loader=None, backend = backend or 'disabled' loader = loader or current_app.loader aliases = dict(BACKEND_ALIASES, **loader.override_backends) - aliases.update( - load_extension_class_names(extension_namespace) or {}) + aliases.update(load_extension_class_names(extension_namespace)) try: cls = symbol_by_name(backend, aliases) except ValueError as exc: diff --git a/celery/beat.py b/celery/beat.py index d8a4fc9e8b2..74537e3469d 100644 --- a/celery/beat.py +++ b/celery/beat.py @@ -666,8 +666,7 @@ def stop(self, wait=False): def get_scheduler(self, lazy=False, extension_namespace='celery.beat_schedulers'): filename = self.schedule_filename - aliases = dict( - load_extension_class_names(extension_namespace) or {}) + aliases = dict(load_extension_class_names(extension_namespace)) return symbol_by_name(self.scheduler_cls, aliases=aliases)( app=self.app, schedule_filename=filename, diff --git a/celery/bin/celery.py b/celery/bin/celery.py index c6b862d0f10..2aee6414be4 100644 --- a/celery/bin/celery.py +++ b/celery/bin/celery.py @@ -3,12 +3,16 @@ import pathlib import traceback +try: + from importlib.metadata import entry_points +except ImportError: + from importlib_metadata import entry_points + import click import click.exceptions from click.types import ParamType from click_didyoumean import DYMGroup from click_plugins import with_plugins -from pkg_resources import iter_entry_points from celery import VERSION_BANNER from celery.app.utils import find_app @@ -71,7 +75,7 @@ def convert(self, value, param, ctx): APP = App() -@with_plugins(iter_entry_points('celery.commands')) +@with_plugins(entry_points().get('celery.commands', [])) @click.group(cls=DYMGroup, invoke_without_command=True) @click.option('-A', '--app', diff --git a/celery/utils/imports.py b/celery/utils/imports.py index 0303bd3c051..9e841c6e2ea 100644 --- a/celery/utils/imports.py +++ b/celery/utils/imports.py @@ -6,6 +6,11 @@ from contextlib import contextmanager from importlib import reload +try: + from importlib.metadata import entry_points +except ImportError: + from importlib_metadata import entry_points + from kombu.utils.imports import symbol_by_name #: Billiard sets this when execv is enabled. @@ -137,12 +142,7 @@ def gen_task_name(app, name, module_name): def load_extension_class_names(namespace): - try: - from pkg_resources import iter_entry_points - except ImportError: # pragma: no cover - return - - for ep in iter_entry_points(namespace): + for ep in entry_points().get(namespace, []): yield ep.name, ':'.join([ep.module_name, ep.attrs[0]]) diff --git a/docs/userguide/extending.rst b/docs/userguide/extending.rst index 59c8f83401e..ea8c0462598 100644 --- a/docs/userguide/extending.rst +++ b/docs/userguide/extending.rst @@ -829,7 +829,7 @@ New commands can be added to the :program:`celery` umbrella command by using Entry-points is special meta-data that can be added to your packages ``setup.py`` program, -and then after installation, read from the system using the :mod:`pkg_resources` module. +and then after installation, read from the system using the :mod:`importlib` module. 
 Celery recognizes ``celery.commands`` entry-points to install additional
 sub-commands, where the value of the entry-point must point to a valid click

diff --git a/requirements/default.txt b/requirements/default.txt
index 509a43d9e5e..0203186c858 100644
--- a/requirements/default.txt
+++ b/requirements/default.txt
@@ -6,4 +6,4 @@ click>=8.0.3,<9.0
 click-didyoumean>=0.0.3
 click-repl>=0.2.0
 click-plugins>=1.1.1
-setuptools>=59.1.1,<59.7.0
+importlib-metadata>=1.4.0; python_version < '3.8'

From 0dd1e470ffe05646877ddf076d2700a8f5a824a9 Mon Sep 17 00:00:00 2001
From: Bruno Alla
Date: Wed, 19 Jan 2022 04:38:42 +0000
Subject: [PATCH 0008/1051] Clarify relation between visibility timeout &
 predefined queues in SQS (#7234)

* Clarify relation between visibility timeout & predefined queues in SQS

* Clarify further

* Fix cross-references in SQS page
---
 docs/getting-started/backends-and-brokers/sqs.rst | 11 +++++++++++
 1 file changed, 11 insertions(+)

diff --git a/docs/getting-started/backends-and-brokers/sqs.rst b/docs/getting-started/backends-and-brokers/sqs.rst
index ae5e2ff9d17..a9f82686910 100644
--- a/docs/getting-started/backends-and-brokers/sqs.rst
+++ b/docs/getting-started/backends-and-brokers/sqs.rst
@@ -82,6 +82,8 @@ by configuring the :setting:`broker_transport_options` setting::

     http://aws.amazon.com/about-aws/globalinfrastructure/

+.. _sqs-visibility-timeout:
+
 Visibility Timeout
 ------------------

@@ -95,6 +97,9 @@ This option is set via the :setting:`broker_transport_options` setting::

 The default visibility timeout is 30 minutes.

+This option is used when creating the SQS queue and has no effect if
+using :ref:`predefined queues <predefined-queues>`.
+
 Polling Interval
 ----------------

@@ -143,6 +148,8 @@ using the :setting:`broker_transport_options` setting::

     broker_transport_options = {'queue_name_prefix': 'celery-'}

+.. _predefined-queues:
+
 Predefined Queues
 -----------------

@@ -161,6 +168,10 @@ setting::
         }
     }

+When using this option, the visibility timeout should be set in the SQS queue
+(in AWS) rather than via the :ref:`visibility timeout <sqs-visibility-timeout>`
+option.
+
 Back-off policy
 ------------------------
 Back-off policy is using SQS visibility timeout mechanism altering the time difference between task retries.
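A minimal sketch of the configuration described above (the queue name, URL,
region, and credentials are placeholders; with ``predefined_queues`` the
visibility timeout is taken from the queue's settings in AWS, not from the
transport options)::

    broker_transport_options = {
        'region': 'us-east-1',
        'predefined_queues': {
            'my-queue': {
                'url': 'https://sqs.us-east-1.amazonaws.com/123456789012/my-queue',
                'access_key_id': 'xxx',
                'secret_access_key': 'xxx',
            },
        },
    }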
From fdb4af3cbf88ab59a3ed25a13b554b718768d178 Mon Sep 17 00:00:00 2001 From: Sami Tahri Date: Thu, 20 Jan 2022 22:28:29 +0100 Subject: [PATCH 0009/1051] fix #7245 uid duplicated in command params --- celery/bin/base.py | 1 - 1 file changed, 1 deletion(-) diff --git a/celery/bin/base.py b/celery/bin/base.py index 30358dd8a9a..c41b6f97005 100644 --- a/celery/bin/base.py +++ b/celery/bin/base.py @@ -179,7 +179,6 @@ def __init__(self, *args, **kwargs): self.params.append(CeleryOption(('-f', '--logfile'), help_group="Daemonization Options")) self.params.append(CeleryOption(('--pidfile',), help_group="Daemonization Options")) self.params.append(CeleryOption(('--uid',), help_group="Daemonization Options")) - self.params.append(CeleryOption(('--uid',), help_group="Daemonization Options")) self.params.append(CeleryOption(('--gid',), help_group="Daemonization Options")) self.params.append(CeleryOption(('--umask',), help_group="Daemonization Options")) self.params.append(CeleryOption(('--executable',), help_group="Daemonization Options")) From f36c16f2debd65c2f9c011b07ca72a77b300db4e Mon Sep 17 00:00:00 2001 From: Sygmei <3835355+Sygmei@users.noreply.github.com> Date: Tue, 25 Jan 2022 01:24:00 +0100 Subject: [PATCH 0010/1051] fix typo in exception --- celery/backends/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/celery/backends/base.py b/celery/backends/base.py index 86286ca9df5..4fddf05bdb3 100644 --- a/celery/backends/base.py +++ b/celery/backends/base.py @@ -358,7 +358,7 @@ def exception_to_python(self, exc): try: exc_type = exc['exc_type'] except KeyError as e: - raise ValueError("Exception information must include" + raise ValueError("Exception information must include " "the exception type") from e if exc_module is None: cls = create_exception_cls( From 3a1a48027dbf702d514bc136b6abe922958816ce Mon Sep 17 00:00:00 2001 From: Omer Katz Date: Thu, 20 Jan 2022 19:12:03 +0200 Subject: [PATCH 0011/1051] Annotate test failures in PRs --- requirements/test-ci-base.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements/test-ci-base.txt b/requirements/test-ci-base.txt index 3563008e5ca..26aaa089f31 100644 --- a/requirements/test-ci-base.txt +++ b/requirements/test-ci-base.txt @@ -1,4 +1,5 @@ pytest-cov +pytest-github-actions-annotate-failures codecov -r extras/redis.txt -r extras/sqlalchemy.txt From 25ca389039f667cdf940c9efe44bdefd1cf70f30 Mon Sep 17 00:00:00 2001 From: Mads Jensen Date: Tue, 25 Jan 2022 12:11:04 +0100 Subject: [PATCH 0012/1051] Set max_line_length in .editorconfig to match flake8 config. (#7263) * Set max_line_length in .editorconfig to match flake8 config. 
* [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .editorconfig | 2 +- celery/__init__.py | 3 +-- celery/app/__init__.py | 3 +-- celery/app/base.py | 11 ++++------- celery/app/log.py | 3 +-- celery/app/task.py | 3 +-- celery/app/trace.py | 7 ++----- celery/app/utils.py | 3 +-- celery/apps/multi.py | 3 +-- celery/backends/azureblockblob.py | 3 +-- celery/backends/base.py | 16 +++++----------- celery/backends/cosmosdbsql.py | 3 +-- celery/backends/redis.py | 3 +-- celery/bin/beat.py | 3 +-- celery/bin/call.py | 3 +-- celery/bin/control.py | 3 +-- celery/bin/events.py | 3 +-- celery/bin/migrate.py | 3 +-- celery/bin/purge.py | 3 +-- celery/bin/result.py | 3 +-- celery/bin/shell.py | 3 +-- celery/bin/upgrade.py | 3 +-- celery/bin/worker.py | 6 ++---- celery/canvas.py | 3 +-- celery/exceptions.py | 3 +-- celery/loaders/base.py | 3 +-- celery/schedules.py | 4 ++-- celery/security/__init__.py | 3 +-- celery/utils/collections.py | 3 +-- celery/utils/functional.py | 3 +-- celery/utils/saferepr.py | 3 +-- celery/worker/consumer/consumer.py | 6 ++---- celery/worker/request.py | 3 +-- celery/worker/worker.py | 3 +-- examples/celery_http_gateway/urls.py | 3 +-- t/integration/test_canvas.py | 18 +++++++----------- t/integration/test_tasks.py | 6 ++---- t/unit/app/test_defaults.py | 5 ++--- t/unit/app/test_log.py | 3 +-- t/unit/app/test_schedules.py | 3 +-- t/unit/apps/test_multi.py | 3 +-- t/unit/backends/test_base.py | 6 ++---- t/unit/backends/test_database.py | 6 ++---- t/unit/backends/test_redis.py | 3 +-- t/unit/concurrency/test_prefork.py | 4 +--- t/unit/conftest.py | 6 ++---- t/unit/contrib/test_migrate.py | 9 +++------ t/unit/events/test_state.py | 3 +-- t/unit/fixups/test_django.py | 3 +-- t/unit/tasks/test_canvas.py | 5 ++--- t/unit/tasks/test_result.py | 6 ++---- t/unit/tasks/test_trace.py | 12 ++++-------- t/unit/utils/test_collections.py | 5 ++--- t/unit/utils/test_functional.py | 6 ++---- t/unit/utils/test_imports.py | 3 +-- t/unit/utils/test_platforms.py | 14 +++++--------- t/unit/utils/test_serialization.py | 7 ++----- t/unit/utils/test_text.py | 3 +-- t/unit/utils/test_threads.py | 3 +-- t/unit/utils/test_time.py | 9 +++------ t/unit/worker/test_consumer.py | 3 +-- t/unit/worker/test_loops.py | 3 +-- t/unit/worker/test_request.py | 9 +++------ t/unit/worker/test_worker.py | 3 +-- 64 files changed, 104 insertions(+), 201 deletions(-) diff --git a/.editorconfig b/.editorconfig index 38d889273b2..140566f1819 100644 --- a/.editorconfig +++ b/.editorconfig @@ -9,7 +9,7 @@ trim_trailing_whitespace = true insert_final_newline = true charset = utf-8 end_of_line = lf -max_line_length = 78 +max_line_length = 117 [Makefile] indent_style = tab diff --git a/celery/__init__.py b/celery/__init__.py index df1fe1a6c05..abe15b29114 100644 --- a/celery/__init__.py +++ b/celery/__init__.py @@ -70,8 +70,7 @@ def debug_import(name, locals=None, globals=None, from celery.app.base import Celery from celery.app.task import Task from celery.app.utils import bugreport - from celery.canvas import (chain, chord, chunks, group, # noqa - maybe_signature, signature, subtask, xmap, + from celery.canvas import (chain, chord, chunks, group, maybe_signature, signature, subtask, xmap, # noqa xstarmap) from celery.utils import uuid diff --git a/celery/app/__init__.py b/celery/app/__init__.py index 2bb1c13ff7f..4a946d93053 100644 --- a/celery/app/__init__.py +++ 
b/celery/app/__init__.py @@ -1,7 +1,6 @@ """Celery Application.""" from celery import _state -from celery._state import (app_or_default, disable_trace, enable_trace, - pop_current_task, push_current_task) +from celery._state import app_or_default, disable_trace, enable_trace, pop_current_task, push_current_task from celery.local import Proxy from .base import Celery diff --git a/celery/app/base.py b/celery/app/base.py index 671fc846ac6..bd222651b4d 100644 --- a/celery/app/base.py +++ b/celery/app/base.py @@ -18,10 +18,8 @@ from vine import starpromise from celery import platforms, signals -from celery._state import (_announce_app_finalized, _deregister_app, - _register_app, _set_current_app, _task_stack, - connect_on_app_finalize, get_current_app, - get_current_worker_task, set_default_app) +from celery._state import (_announce_app_finalized, _deregister_app, _register_app, _set_current_app, _task_stack, + connect_on_app_finalize, get_current_app, get_current_worker_task, set_default_app) from celery.exceptions import AlwaysEagerIgnored, ImproperlyConfigured from celery.loaders import get_loader_cls from celery.local import PromiseProxy, maybe_evaluate @@ -41,9 +39,8 @@ from .autoretry import add_autoretry_behaviour from .defaults import DEFAULT_SECURITY_DIGEST, find_deprecated_settings from .registry import TaskRegistry -from .utils import (AppPickler, Settings, _new_key_to_old, _old_key_to_new, - _unpickle_app, _unpickle_app_v2, appstr, bugreport, - detect_settings) +from .utils import (AppPickler, Settings, _new_key_to_old, _old_key_to_new, _unpickle_app, _unpickle_app_v2, appstr, + bugreport, detect_settings) __all__ = ('Celery',) diff --git a/celery/app/log.py b/celery/app/log.py index 4ca9bc7ccd1..6e03722b8a7 100644 --- a/celery/app/log.py +++ b/celery/app/log.py @@ -19,8 +19,7 @@ from celery.exceptions import CDeprecationWarning, CPendingDeprecationWarning from celery.local import class_property from celery.platforms import isatty -from celery.utils.log import (ColorFormatter, LoggingProxy, get_logger, - get_multiprocessing_logger, mlevel, +from celery.utils.log import (ColorFormatter, LoggingProxy, get_logger, get_multiprocessing_logger, mlevel, reset_multiprocessing_logger) from celery.utils.nodenames import node_format from celery.utils.term import colored diff --git a/celery/app/task.py b/celery/app/task.py index 9a6796e6bb3..de25715fc55 100644 --- a/celery/app/task.py +++ b/celery/app/task.py @@ -9,8 +9,7 @@ from celery import current_app, states from celery._state import _task_stack from celery.canvas import _chain, group, signature -from celery.exceptions import (Ignore, ImproperlyConfigured, - MaxRetriesExceededError, Reject, Retry) +from celery.exceptions import Ignore, ImproperlyConfigured, MaxRetriesExceededError, Reject, Retry from celery.local import class_property from celery.result import EagerResult, denied_join_result from celery.utils import abstract diff --git a/celery/app/trace.py b/celery/app/trace.py index 7b5b00b8c95..778c4bb1994 100644 --- a/celery/app/trace.py +++ b/celery/app/trace.py @@ -20,16 +20,13 @@ from celery._state import _task_stack from celery.app.task import Context from celery.app.task import Task as BaseTask -from celery.exceptions import (BackendGetMetaError, Ignore, InvalidTaskError, - Reject, Retry) +from celery.exceptions import BackendGetMetaError, Ignore, InvalidTaskError, Reject, Retry from celery.result import AsyncResult from celery.utils.log import get_logger from celery.utils.nodenames import gethostname from celery.utils.objects 
import mro_lookup from celery.utils.saferepr import saferepr -from celery.utils.serialization import (get_pickleable_etype, - get_pickleable_exception, - get_pickled_exception) +from celery.utils.serialization import get_pickleable_etype, get_pickleable_exception, get_pickled_exception # ## --- # This is the heart of the worker, the inner loop so to speak. diff --git a/celery/app/utils.py b/celery/app/utils.py index 8b72652e708..c825045ade7 100644 --- a/celery/app/utils.py +++ b/celery/app/utils.py @@ -15,8 +15,7 @@ from celery.utils.imports import import_from_cwd, qualname, symbol_by_name from celery.utils.text import pretty -from .defaults import (_OLD_DEFAULTS, _OLD_SETTING_KEYS, _TO_NEW_KEY, - _TO_OLD_KEY, DEFAULTS, SETTING_KEYS, find) +from .defaults import _OLD_DEFAULTS, _OLD_SETTING_KEYS, _TO_NEW_KEY, _TO_OLD_KEY, DEFAULTS, SETTING_KEYS, find __all__ = ( 'Settings', 'appstr', 'bugreport', diff --git a/celery/apps/multi.py b/celery/apps/multi.py index 613743426e5..1fe60042251 100644 --- a/celery/apps/multi.py +++ b/celery/apps/multi.py @@ -13,8 +13,7 @@ from kombu.utils.objects import cached_property from celery.platforms import IS_WINDOWS, Pidfile, signal_name -from celery.utils.nodenames import (gethostname, host_format, node_format, - nodesplit) +from celery.utils.nodenames import gethostname, host_format, node_format, nodesplit from celery.utils.saferepr import saferepr __all__ = ('Cluster', 'Node') diff --git a/celery/backends/azureblockblob.py b/celery/backends/azureblockblob.py index e7d2c231808..862777b5fdb 100644 --- a/celery/backends/azureblockblob.py +++ b/celery/backends/azureblockblob.py @@ -9,8 +9,7 @@ try: import azure.storage.blob as azurestorage - from azure.core.exceptions import (ResourceExistsError, - ResourceNotFoundError) + from azure.core.exceptions import ResourceExistsError, ResourceNotFoundError from azure.storage.blob import BlobServiceClient except ImportError: azurestorage = None diff --git a/celery/backends/base.py b/celery/backends/base.py index 4fddf05bdb3..20e890c7be5 100644 --- a/celery/backends/base.py +++ b/celery/backends/base.py @@ -23,20 +23,14 @@ from celery import current_app, group, maybe_signature, states from celery._state import get_current_task from celery.app.task import Context -from celery.exceptions import (BackendGetMetaError, BackendStoreError, - ChordError, ImproperlyConfigured, - NotRegistered, SecurityError, TaskRevokedError, - TimeoutError) -from celery.result import (GroupResult, ResultBase, ResultSet, - allow_join_result, result_from_tuple) +from celery.exceptions import (BackendGetMetaError, BackendStoreError, ChordError, ImproperlyConfigured, + NotRegistered, SecurityError, TaskRevokedError, TimeoutError) +from celery.result import GroupResult, ResultBase, ResultSet, allow_join_result, result_from_tuple from celery.utils.collections import BufferMap from celery.utils.functional import LRUCache, arity_greater from celery.utils.log import get_logger -from celery.utils.serialization import (create_exception_cls, - ensure_serializable, - get_pickleable_exception, - get_pickled_exception, - raise_with_context) +from celery.utils.serialization import (create_exception_cls, ensure_serializable, get_pickleable_exception, + get_pickled_exception, raise_with_context) from celery.utils.time import get_exponential_backoff_interval __all__ = ('BaseBackend', 'KeyValueStoreBackend', 'DisabledBackend') diff --git a/celery/backends/cosmosdbsql.py b/celery/backends/cosmosdbsql.py index 344e46ede0c..cfe560697a9 100644 --- 
a/celery/backends/cosmosdbsql.py +++ b/celery/backends/cosmosdbsql.py @@ -11,8 +11,7 @@ try: import pydocumentdb from pydocumentdb.document_client import DocumentClient - from pydocumentdb.documents import (ConnectionPolicy, ConsistencyLevel, - PartitionKind) + from pydocumentdb.documents import ConnectionPolicy, ConsistencyLevel, PartitionKind from pydocumentdb.errors import HTTPFailure from pydocumentdb.retry_options import RetryOptions except ImportError: # pragma: no cover diff --git a/celery/backends/redis.py b/celery/backends/redis.py index 252ecfb58d2..a193181ba8f 100644 --- a/celery/backends/redis.py +++ b/celery/backends/redis.py @@ -12,8 +12,7 @@ from celery import states from celery._state import task_join_will_block from celery.canvas import maybe_signature -from celery.exceptions import (BackendStoreError, ChordError, - ImproperlyConfigured) +from celery.exceptions import BackendStoreError, ChordError, ImproperlyConfigured from celery.result import GroupResult, allow_join_result from celery.utils.functional import _regen, dictfilter from celery.utils.log import get_logger diff --git a/celery/bin/beat.py b/celery/bin/beat.py index 145b44e9720..9fcdc760794 100644 --- a/celery/bin/beat.py +++ b/celery/bin/beat.py @@ -3,8 +3,7 @@ import click -from celery.bin.base import (LOG_LEVEL, CeleryDaemonCommand, CeleryOption, - handle_preload_options) +from celery.bin.base import LOG_LEVEL, CeleryDaemonCommand, CeleryOption, handle_preload_options from celery.platforms import detached, maybe_drop_privileges diff --git a/celery/bin/call.py b/celery/bin/call.py index a04651bdd4f..b1df9502891 100644 --- a/celery/bin/call.py +++ b/celery/bin/call.py @@ -1,8 +1,7 @@ """The ``celery call`` program used to send tasks from the command-line.""" import click -from celery.bin.base import (ISO8601, ISO8601_OR_FLOAT, JSON_ARRAY, - JSON_OBJECT, CeleryCommand, CeleryOption, +from celery.bin.base import (ISO8601, ISO8601_OR_FLOAT, JSON_ARRAY, JSON_OBJECT, CeleryCommand, CeleryOption, handle_preload_options) diff --git a/celery/bin/control.py b/celery/bin/control.py index fbd3730c490..f7bba96ddf0 100644 --- a/celery/bin/control.py +++ b/celery/bin/control.py @@ -4,8 +4,7 @@ import click from kombu.utils.json import dumps -from celery.bin.base import (COMMA_SEPARATED_LIST, CeleryCommand, - CeleryOption, handle_preload_options) +from celery.bin.base import COMMA_SEPARATED_LIST, CeleryCommand, CeleryOption, handle_preload_options from celery.exceptions import CeleryCommandException from celery.platforms import EX_UNAVAILABLE from celery.utils import text diff --git a/celery/bin/events.py b/celery/bin/events.py index fa37c8352fc..89470838bcc 100644 --- a/celery/bin/events.py +++ b/celery/bin/events.py @@ -4,8 +4,7 @@ import click -from celery.bin.base import (LOG_LEVEL, CeleryDaemonCommand, CeleryOption, - handle_preload_options) +from celery.bin.base import LOG_LEVEL, CeleryDaemonCommand, CeleryOption, handle_preload_options from celery.platforms import detached, set_process_title, strargv diff --git a/celery/bin/migrate.py b/celery/bin/migrate.py index febaaaacab2..fc3c88b8e80 100644 --- a/celery/bin/migrate.py +++ b/celery/bin/migrate.py @@ -2,8 +2,7 @@ import click from kombu import Connection -from celery.bin.base import (CeleryCommand, CeleryOption, - handle_preload_options) +from celery.bin.base import CeleryCommand, CeleryOption, handle_preload_options from celery.contrib.migrate import migrate_tasks diff --git a/celery/bin/purge.py b/celery/bin/purge.py index 2629ac7eff3..7be1a8241fb 100644 --- 
a/celery/bin/purge.py +++ b/celery/bin/purge.py @@ -1,8 +1,7 @@ """The ``celery purge`` program, used to delete messages from queues.""" import click -from celery.bin.base import (COMMA_SEPARATED_LIST, CeleryCommand, - CeleryOption, handle_preload_options) +from celery.bin.base import COMMA_SEPARATED_LIST, CeleryCommand, CeleryOption, handle_preload_options from celery.utils import text diff --git a/celery/bin/result.py b/celery/bin/result.py index c126fb588ee..615ee2eb4a4 100644 --- a/celery/bin/result.py +++ b/celery/bin/result.py @@ -1,8 +1,7 @@ """The ``celery result`` program, used to inspect task results.""" import click -from celery.bin.base import (CeleryCommand, CeleryOption, - handle_preload_options) +from celery.bin.base import CeleryCommand, CeleryOption, handle_preload_options @click.command(cls=CeleryCommand) diff --git a/celery/bin/shell.py b/celery/bin/shell.py index 378448a24cf..77b14d8a307 100644 --- a/celery/bin/shell.py +++ b/celery/bin/shell.py @@ -6,8 +6,7 @@ import click -from celery.bin.base import (CeleryCommand, CeleryOption, - handle_preload_options) +from celery.bin.base import CeleryCommand, CeleryOption, handle_preload_options def _invoke_fallback_shell(locals): diff --git a/celery/bin/upgrade.py b/celery/bin/upgrade.py index cd9a695b702..bbfdb0441f2 100644 --- a/celery/bin/upgrade.py +++ b/celery/bin/upgrade.py @@ -5,8 +5,7 @@ import click from celery.app import defaults -from celery.bin.base import (CeleryCommand, CeleryOption, - handle_preload_options) +from celery.bin.base import CeleryCommand, CeleryOption, handle_preload_options from celery.utils.functional import pass1 diff --git a/celery/bin/worker.py b/celery/bin/worker.py index f0629fcaf52..e93f6ed6c0e 100644 --- a/celery/bin/worker.py +++ b/celery/bin/worker.py @@ -8,13 +8,11 @@ from click.types import StringParamType from celery import concurrency -from celery.bin.base import (COMMA_SEPARATED_LIST, LOG_LEVEL, - CeleryDaemonCommand, CeleryOption, +from celery.bin.base import (COMMA_SEPARATED_LIST, LOG_LEVEL, CeleryDaemonCommand, CeleryOption, handle_preload_options) from celery.concurrency.base import BasePool from celery.exceptions import SecurityError -from celery.platforms import (EX_FAILURE, EX_OK, detached, - maybe_drop_privileges) +from celery.platforms import EX_FAILURE, EX_OK, detached, maybe_drop_privileges from celery.utils.log import get_logger from celery.utils.nodenames import default_nodename, host_format, node_format diff --git a/celery/canvas.py b/celery/canvas.py index e0b55389288..a013ba4e9ed 100644 --- a/celery/canvas.py +++ b/celery/canvas.py @@ -26,8 +26,7 @@ from celery.utils.collections import ChainMap from celery.utils.functional import _regen from celery.utils.functional import chunks as _chunks -from celery.utils.functional import (is_list, lookahead, maybe_list, regen, - seq_concat_item, seq_concat_seq) +from celery.utils.functional import is_list, lookahead, maybe_list, regen, seq_concat_item, seq_concat_seq from celery.utils.objects import getitem_property from celery.utils.text import remove_repeating_from_task, truncate diff --git a/celery/exceptions.py b/celery/exceptions.py index 64b017aa7c0..9b6129c19cd 100644 --- a/celery/exceptions.py +++ b/celery/exceptions.py @@ -53,8 +53,7 @@ import numbers -from billiard.exceptions import (SoftTimeLimitExceeded, Terminated, - TimeLimitExceeded, WorkerLostError) +from billiard.exceptions import SoftTimeLimitExceeded, Terminated, TimeLimitExceeded, WorkerLostError from click import ClickException from kombu.exceptions import 
OperationalError diff --git a/celery/loaders/base.py b/celery/loaders/base.py index 17f165d7c03..aa7139c78af 100644 --- a/celery/loaders/base.py +++ b/celery/loaders/base.py @@ -12,8 +12,7 @@ from celery.exceptions import reraise from celery.utils.collections import DictAttribute, force_mapping from celery.utils.functional import maybe_list -from celery.utils.imports import (NotAPackage, find_module, import_from_cwd, - symbol_by_name) +from celery.utils.imports import NotAPackage, find_module, import_from_cwd, symbol_by_name __all__ = ('BaseLoader',) diff --git a/celery/schedules.py b/celery/schedules.py index 3731b747cee..5ffbf4147e2 100644 --- a/celery/schedules.py +++ b/celery/schedules.py @@ -11,8 +11,8 @@ from . import current_app from .utils.collections import AttributeDict -from .utils.time import (ffwd, humanize_seconds, localize, maybe_make_aware, - maybe_timedelta, remaining, timezone, weekday) +from .utils.time import (ffwd, humanize_seconds, localize, maybe_make_aware, maybe_timedelta, remaining, timezone, + weekday) __all__ = ( 'ParseException', 'schedule', 'crontab', 'crontab_parser', diff --git a/celery/security/__init__.py b/celery/security/__init__.py index 26237856939..8b7f74cc407 100644 --- a/celery/security/__init__.py +++ b/celery/security/__init__.py @@ -1,6 +1,5 @@ """Message Signing Serializer.""" -from kombu.serialization import \ - disable_insecure_serializers as _disable_insecure_serializers +from kombu.serialization import disable_insecure_serializers as _disable_insecure_serializers from kombu.serialization import registry from celery.exceptions import ImproperlyConfigured diff --git a/celery/utils/collections.py b/celery/utils/collections.py index df37d12c3b4..e83e2f40716 100644 --- a/celery/utils/collections.py +++ b/celery/utils/collections.py @@ -2,8 +2,7 @@ import time from collections import OrderedDict as _OrderedDict from collections import deque -from collections.abc import (Callable, Mapping, MutableMapping, MutableSet, - Sequence) +from collections.abc import Callable, Mapping, MutableMapping, MutableSet, Sequence from heapq import heapify, heappop, heappush from itertools import chain, count from queue import Empty diff --git a/celery/utils/functional.py b/celery/utils/functional.py index e8a8453cc6e..da866b75dc2 100644 --- a/celery/utils/functional.py +++ b/celery/utils/functional.py @@ -5,8 +5,7 @@ from functools import partial from itertools import islice, tee, zip_longest -from kombu.utils.functional import (LRUCache, dictfilter, is_list, lazy, - maybe_evaluate, maybe_list, memoize) +from kombu.utils.functional import LRUCache, dictfilter, is_list, lazy, maybe_evaluate, maybe_list, memoize from vine import promise __all__ = ( diff --git a/celery/utils/saferepr.py b/celery/utils/saferepr.py index 245d8ef5cfa..de8d15a9b71 100644 --- a/celery/utils/saferepr.py +++ b/celery/utils/saferepr.py @@ -15,8 +15,7 @@ from itertools import chain from numbers import Number from pprint import _recursion -from typing import (Any, AnyStr, Callable, Dict, Iterator, List, Sequence, - Set, Tuple) +from typing import Any, AnyStr, Callable, Dict, Iterator, List, Sequence, Set, Tuple from .text import truncate diff --git a/celery/worker/consumer/consumer.py b/celery/worker/consumer/consumer.py index c72493f5d02..d59f64a88a8 100644 --- a/celery/worker/consumer/consumer.py +++ b/celery/worker/consumer/consumer.py @@ -22,8 +22,7 @@ from celery import bootsteps, signals from celery.app.trace import build_tracer -from celery.exceptions import (CPendingDeprecationWarning, 
InvalidTaskError, - NotRegistered) +from celery.exceptions import CPendingDeprecationWarning, InvalidTaskError, NotRegistered from celery.utils.functional import noop from celery.utils.log import get_logger from celery.utils.nodenames import gethostname @@ -31,8 +30,7 @@ from celery.utils.text import truncate from celery.utils.time import humanize_seconds, rate from celery.worker import loops -from celery.worker.state import (active_requests, maybe_shutdown, - reserved_requests, task_reserved) +from celery.worker.state import active_requests, maybe_shutdown, reserved_requests, task_reserved __all__ = ('Consumer', 'Evloop', 'dump_body') diff --git a/celery/worker/request.py b/celery/worker/request.py index fb6d60e6812..b9fcb14bc67 100644 --- a/celery/worker/request.py +++ b/celery/worker/request.py @@ -16,8 +16,7 @@ from celery import current_app, signals from celery.app.task import Context from celery.app.trace import fast_trace_task, trace_task, trace_task_ret -from celery.exceptions import (Ignore, InvalidTaskError, Reject, Retry, - TaskRevokedError, Terminated, +from celery.exceptions import (Ignore, InvalidTaskError, Reject, Retry, TaskRevokedError, Terminated, TimeLimitExceeded, WorkerLostError) from celery.platforms import signals as _signals from celery.utils.functional import maybe, noop diff --git a/celery/worker/worker.py b/celery/worker/worker.py index f67d1a336da..c0640120613 100644 --- a/celery/worker/worker.py +++ b/celery/worker/worker.py @@ -23,8 +23,7 @@ from celery import concurrency as _concurrency from celery import signals from celery.bootsteps import RUN, TERMINATE -from celery.exceptions import (ImproperlyConfigured, TaskRevokedError, - WorkerTerminate) +from celery.exceptions import ImproperlyConfigured, TaskRevokedError, WorkerTerminate from celery.platforms import EX_FAILURE, create_pidlock from celery.utils.imports import reload_from_cwd from celery.utils.log import mlevel diff --git a/examples/celery_http_gateway/urls.py b/examples/celery_http_gateway/urls.py index c916ff8029b..802ff2344b2 100644 --- a/examples/celery_http_gateway/urls.py +++ b/examples/celery_http_gateway/urls.py @@ -1,6 +1,5 @@ from celery_http_gateway.tasks import hello_world -from django.conf.urls.defaults import (handler404, handler500, # noqa - include, patterns, url) +from django.conf.urls.defaults import handler404, handler500, include, patterns, url # noqa from djcelery import views as celery_views # Uncomment the next two lines to enable the admin: diff --git a/t/integration/test_canvas.py b/t/integration/test_canvas.py index 11079a70d92..e73c0edb172 100644 --- a/t/integration/test_canvas.py +++ b/t/integration/test_canvas.py @@ -14,17 +14,13 @@ from celery.result import AsyncResult, GroupResult, ResultSet from . 
import tasks -from .conftest import (TEST_BACKEND, get_active_redis_channels, - get_redis_connection) -from .tasks import (ExpectedException, add, add_chord_to_chord, add_replaced, - add_to_all, add_to_all_to_chord, build_chain_inside_task, - collect_ids, delayed_sum, delayed_sum_with_soft_guard, - errback_new_style, errback_old_style, fail, fail_replaced, - identity, ids, print_unicode, raise_error, redis_count, - redis_echo, replace_with_chain, - replace_with_chain_which_raises, replace_with_empty_chain, - retry_once, return_exception, return_priority, - second_order_replace1, tsum, write_to_file_and_return_int) +from .conftest import TEST_BACKEND, get_active_redis_channels, get_redis_connection +from .tasks import (ExpectedException, add, add_chord_to_chord, add_replaced, add_to_all, add_to_all_to_chord, + build_chain_inside_task, collect_ids, delayed_sum, delayed_sum_with_soft_guard, + errback_new_style, errback_old_style, fail, fail_replaced, identity, ids, print_unicode, + raise_error, redis_count, redis_echo, replace_with_chain, replace_with_chain_which_raises, + replace_with_empty_chain, retry_once, return_exception, return_priority, second_order_replace1, + tsum, write_to_file_and_return_int) RETRYABLE_EXCEPTIONS = (OSError, ConnectionError, TimeoutError) diff --git a/t/integration/test_tasks.py b/t/integration/test_tasks.py index 5596e2986bf..cc2c6761b7d 100644 --- a/t/integration/test_tasks.py +++ b/t/integration/test_tasks.py @@ -7,10 +7,8 @@ from celery import group from .conftest import get_active_redis_channels -from .tasks import (ClassBasedAutoRetryTask, ExpectedException, add, - add_ignore_result, add_not_typed, fail, print_unicode, - retry, retry_once, retry_once_priority, return_properties, - sleeping) +from .tasks import (ClassBasedAutoRetryTask, ExpectedException, add, add_ignore_result, add_not_typed, fail, + print_unicode, retry, retry_once, retry_once_priority, return_properties, sleeping) TIMEOUT = 10 diff --git a/t/unit/app/test_defaults.py b/t/unit/app/test_defaults.py index e105f2b49d2..649ca4aab7d 100644 --- a/t/unit/app/test_defaults.py +++ b/t/unit/app/test_defaults.py @@ -1,9 +1,8 @@ import sys from importlib import import_module -from celery.app.defaults import (_OLD_DEFAULTS, _OLD_SETTING_KEYS, - _TO_NEW_KEY, _TO_OLD_KEY, DEFAULTS, - NAMESPACES, SETTING_KEYS) +from celery.app.defaults import (_OLD_DEFAULTS, _OLD_SETTING_KEYS, _TO_NEW_KEY, _TO_OLD_KEY, DEFAULTS, NAMESPACES, + SETTING_KEYS) class test_defaults: diff --git a/t/unit/app/test_log.py b/t/unit/app/test_log.py index 32440862bd2..60b46b5ee31 100644 --- a/t/unit/app/test_log.py +++ b/t/unit/app/test_log.py @@ -9,8 +9,7 @@ from celery import signals, uuid from celery.app.log import TaskFormatter -from celery.utils.log import (ColorFormatter, LoggingProxy, get_logger, - get_task_logger, in_sighandler) +from celery.utils.log import ColorFormatter, LoggingProxy, get_logger, get_task_logger, in_sighandler from celery.utils.log import logger as base_logger from celery.utils.log import logger_isa, task_logger from t.unit import conftest diff --git a/t/unit/app/test_schedules.py b/t/unit/app/test_schedules.py index a8bed808a30..8f49b5963b0 100644 --- a/t/unit/app/test_schedules.py +++ b/t/unit/app/test_schedules.py @@ -8,8 +8,7 @@ import pytest import pytz -from celery.schedules import (ParseException, crontab, crontab_parser, - schedule, solar) +from celery.schedules import ParseException, crontab, crontab_parser, schedule, solar assertions = TestCase('__init__') diff --git a/t/unit/apps/test_multi.py 
b/t/unit/apps/test_multi.py index 4c3fd9bfc1f..a5c4c0e6c3a 100644 --- a/t/unit/apps/test_multi.py +++ b/t/unit/apps/test_multi.py @@ -7,8 +7,7 @@ import pytest import t.skip -from celery.apps.multi import (Cluster, MultiParser, NamespacedOptionParser, - Node, format_opt) +from celery.apps.multi import Cluster, MultiParser, NamespacedOptionParser, Node, format_opt class test_functions: diff --git a/t/unit/backends/test_base.py b/t/unit/backends/test_base.py index 203cbfdd534..d65fdf2a41f 100644 --- a/t/unit/backends/test_base.py +++ b/t/unit/backends/test_base.py @@ -9,10 +9,8 @@ import celery from celery import chord, group, signature, states, uuid from celery.app.task import Context, Task -from celery.backends.base import (BaseBackend, DisabledBackend, - KeyValueStoreBackend, _nulldict) -from celery.exceptions import (BackendGetMetaError, BackendStoreError, - ChordError, SecurityError, TimeoutError) +from celery.backends.base import BaseBackend, DisabledBackend, KeyValueStoreBackend, _nulldict +from celery.exceptions import BackendGetMetaError, BackendStoreError, ChordError, SecurityError, TimeoutError from celery.result import result_from_tuple from celery.utils import serialization from celery.utils.functional import pass1 diff --git a/t/unit/backends/test_database.py b/t/unit/backends/test_database.py index 28e2fedbbbb..c32440b2fe4 100644 --- a/t/unit/backends/test_database.py +++ b/t/unit/backends/test_database.py @@ -10,11 +10,9 @@ pytest.importorskip('sqlalchemy') -from celery.backends.database import (DatabaseBackend, retry, session, # noqa - session_cleanup) +from celery.backends.database import DatabaseBackend, retry, session, session_cleanup # noqa from celery.backends.database.models import Task, TaskSet # noqa -from celery.backends.database.session import ( # noqa - PREPARE_MODELS_MAX_RETRIES, ResultModelBase, SessionManager) +from celery.backends.database.session import PREPARE_MODELS_MAX_RETRIES, ResultModelBase, SessionManager # noqa from t import skip # noqa diff --git a/t/unit/backends/test_redis.py b/t/unit/backends/test_redis.py index f99fbc37a55..7a09812c526 100644 --- a/t/unit/backends/test_redis.py +++ b/t/unit/backends/test_redis.py @@ -12,8 +12,7 @@ from celery import signature, states, uuid from celery.canvas import Signature from celery.contrib.testing.mocks import ContextMock -from celery.exceptions import (BackendStoreError, ChordError, - ImproperlyConfigured) +from celery.exceptions import BackendStoreError, ChordError, ImproperlyConfigured from celery.result import AsyncResult, GroupResult from celery.utils.collections import AttributeDict from t.unit import conftest diff --git a/t/unit/concurrency/test_prefork.py b/t/unit/concurrency/test_prefork.py index 241dc93a0dc..10ed121278e 100644 --- a/t/unit/concurrency/test_prefork.py +++ b/t/unit/concurrency/test_prefork.py @@ -66,9 +66,7 @@ def Loader(*args, **kwargs): def test_process_initializer(self, _signals, set_mp_process_title, restore_logging): from celery import signals from celery._state import _tls - from celery.concurrency.prefork import (WORKER_SIGIGNORE, - WORKER_SIGRESET, - process_initializer) + from celery.concurrency.prefork import WORKER_SIGIGNORE, WORKER_SIGRESET, process_initializer on_worker_process_init = Mock() signals.worker_process_init.connect(on_worker_process_init) diff --git a/t/unit/conftest.py b/t/unit/conftest.py index 458e9a2ebf0..ecd843a4c44 100644 --- a/t/unit/conftest.py +++ b/t/unit/conftest.py @@ -20,11 +20,9 @@ # we have to import the pytest plugin fixtures here, # in case 
user did not do the `python setup.py develop` yet, # that installs the pytest plugin into the setuptools registry. -from celery.contrib.pytest import (celery_app, celery_enable_logging, - celery_parameters, depends_on_current_app) +from celery.contrib.pytest import celery_app, celery_enable_logging, celery_parameters, depends_on_current_app from celery.contrib.testing.app import TestApp, Trap -from celery.contrib.testing.mocks import (TaskMessage, TaskMessage1, - task_message_from_sig) +from celery.contrib.testing.mocks import TaskMessage, TaskMessage1, task_message_from_sig # Tricks flake8 into silencing redefining fixtures warnings. __all__ = ( diff --git a/t/unit/contrib/test_migrate.py b/t/unit/contrib/test_migrate.py index 2e395057462..6facf3b3419 100644 --- a/t/unit/contrib/test_migrate.py +++ b/t/unit/contrib/test_migrate.py @@ -7,12 +7,9 @@ from kombu.transport.virtual import QoS from kombu.utils.encoding import ensure_bytes -from celery.contrib.migrate import (State, StopFiltering, _maybe_queue, - expand_dest, filter_callback, - filter_status, migrate_task, - migrate_tasks, move, move_by_idmap, - move_by_taskmap, move_task_by_id, - start_filter, task_id_eq, task_id_in) +from celery.contrib.migrate import (State, StopFiltering, _maybe_queue, expand_dest, filter_callback, filter_status, + migrate_task, migrate_tasks, move, move_by_idmap, move_by_taskmap, + move_task_by_id, start_filter, task_id_eq, task_id_in) from t.unit import conftest # hack to ignore error at shutdown diff --git a/t/unit/events/test_state.py b/t/unit/events/test_state.py index 15ccd9a00f0..9522d32cfa9 100644 --- a/t/unit/events/test_state.py +++ b/t/unit/events/test_state.py @@ -9,8 +9,7 @@ from celery import states, uuid from celery.events import Event -from celery.events.state import (HEARTBEAT_DRIFT_MAX, HEARTBEAT_EXPIRE_WINDOW, - State, Task, Worker, heartbeat_expires) +from celery.events.state import HEARTBEAT_DRIFT_MAX, HEARTBEAT_EXPIRE_WINDOW, State, Task, Worker, heartbeat_expires class replay: diff --git a/t/unit/fixups/test_django.py b/t/unit/fixups/test_django.py index 44938b1a04f..8cdcc5c416d 100644 --- a/t/unit/fixups/test_django.py +++ b/t/unit/fixups/test_django.py @@ -3,8 +3,7 @@ import pytest -from celery.fixups.django import (DjangoFixup, DjangoWorkerFixup, - FixupWarning, _maybe_close_fd, fixup) +from celery.fixups.django import DjangoFixup, DjangoWorkerFixup, FixupWarning, _maybe_close_fd, fixup from t.unit import conftest diff --git a/t/unit/tasks/test_canvas.py b/t/unit/tasks/test_canvas.py index bf9e60599c5..eefdef8797b 100644 --- a/t/unit/tasks/test_canvas.py +++ b/t/unit/tasks/test_canvas.py @@ -5,9 +5,8 @@ import pytest_subtests # noqa: F401 from celery._state import _task_stack -from celery.canvas import (Signature, _chain, _maybe_group, chain, chord, - chunks, group, maybe_signature, maybe_unroll_group, - signature, xmap, xstarmap) +from celery.canvas import (Signature, _chain, _maybe_group, chain, chord, chunks, group, maybe_signature, + maybe_unroll_group, signature, xmap, xstarmap) from celery.result import AsyncResult, EagerResult, GroupResult SIG = Signature({ diff --git a/t/unit/tasks/test_result.py b/t/unit/tasks/test_result.py index 4e0975bbc75..6b288e9c557 100644 --- a/t/unit/tasks/test_result.py +++ b/t/unit/tasks/test_result.py @@ -9,10 +9,8 @@ from celery import states, uuid from celery.app.task import Context from celery.backends.base import SyncBackendMixin -from celery.exceptions import (ImproperlyConfigured, IncompleteStream, - TimeoutError) -from celery.result import 
(AsyncResult, EagerResult, GroupResult, ResultSet, - assert_will_not_block, result_from_tuple) +from celery.exceptions import ImproperlyConfigured, IncompleteStream, TimeoutError +from celery.result import AsyncResult, EagerResult, GroupResult, ResultSet, assert_will_not_block, result_from_tuple from celery.utils.serialization import pickle PYTRACEBACK = """\ diff --git a/t/unit/tasks/test_trace.py b/t/unit/tasks/test_trace.py index 55c106894bd..60fa253dda3 100644 --- a/t/unit/tasks/test_trace.py +++ b/t/unit/tasks/test_trace.py @@ -7,14 +7,10 @@ from celery import group, signals, states, uuid from celery.app.task import Context -from celery.app.trace import (TraceInfo, build_tracer, fast_trace_task, - get_log_policy, get_task_name, - log_policy_expected, log_policy_ignore, - log_policy_internal, log_policy_reject, - log_policy_unexpected, - reset_worker_optimizations, - setup_worker_optimizations, trace_task, - trace_task_ret, traceback_clear) +from celery.app.trace import (TraceInfo, build_tracer, fast_trace_task, get_log_policy, get_task_name, + log_policy_expected, log_policy_ignore, log_policy_internal, log_policy_reject, + log_policy_unexpected, reset_worker_optimizations, setup_worker_optimizations, + trace_task, trace_task_ret, traceback_clear) from celery.backends.base import BaseDictBackend from celery.backends.cache import CacheBackend from celery.exceptions import BackendGetMetaError, Ignore, Reject, Retry diff --git a/t/unit/utils/test_collections.py b/t/unit/utils/test_collections.py index be5f96d2ad2..ce776cebf1a 100644 --- a/t/unit/utils/test_collections.py +++ b/t/unit/utils/test_collections.py @@ -7,9 +7,8 @@ from billiard.einfo import ExceptionInfo import t.skip -from celery.utils.collections import (AttributeDict, BufferMap, - ConfigurationView, DictAttribute, - LimitedSet, Messagebuffer) +from celery.utils.collections import (AttributeDict, BufferMap, ConfigurationView, DictAttribute, LimitedSet, + Messagebuffer) from celery.utils.objects import Bunch diff --git a/t/unit/utils/test_functional.py b/t/unit/utils/test_functional.py index 721fd414a3e..b30cd6a6b14 100644 --- a/t/unit/utils/test_functional.py +++ b/t/unit/utils/test_functional.py @@ -4,10 +4,8 @@ import pytest_subtests # noqa: F401 from kombu.utils.functional import lazy -from celery.utils.functional import (DummyContext, first, firstmethod, - fun_accepts_kwargs, fun_takes_argument, - head_from_fun, lookahead, maybe_list, - mlazy, padlist, regen, seq_concat_item, +from celery.utils.functional import (DummyContext, first, firstmethod, fun_accepts_kwargs, fun_takes_argument, + head_from_fun, lookahead, maybe_list, mlazy, padlist, regen, seq_concat_item, seq_concat_seq) diff --git a/t/unit/utils/test_imports.py b/t/unit/utils/test_imports.py index a022be8addd..d3bcedf2234 100644 --- a/t/unit/utils/test_imports.py +++ b/t/unit/utils/test_imports.py @@ -3,8 +3,7 @@ import pytest -from celery.utils.imports import (NotAPackage, find_module, gen_task_name, - module_file, qualname, reload_from_cwd) +from celery.utils.imports import NotAPackage, find_module, gen_task_name, module_file, qualname, reload_from_cwd def test_find_module(): diff --git a/t/unit/utils/test_platforms.py b/t/unit/utils/test_platforms.py index 1c0a03d9893..b3c6cf572bf 100644 --- a/t/unit/utils/test_platforms.py +++ b/t/unit/utils/test_platforms.py @@ -11,15 +11,11 @@ import t.skip from celery import _find_option_with_arg, platforms from celery.exceptions import SecurityError, SecurityWarning -from celery.platforms import (ASSUMING_ROOT, 
ROOT_DISALLOWED, - ROOT_DISCOURAGED, DaemonContext, LockFailed, - Pidfile, _setgroups_hack, check_privileges, - close_open_fds, create_pidlock, detached, - fd_by_path, get_fdmax, ignore_errno, initgroups, - isatty, maybe_drop_privileges, parse_gid, - parse_uid, set_mp_process_title, set_pdeathsig, - set_process_title, setgid, setgroups, setuid, - signals) +from celery.platforms import (ASSUMING_ROOT, ROOT_DISALLOWED, ROOT_DISCOURAGED, DaemonContext, LockFailed, Pidfile, + _setgroups_hack, check_privileges, close_open_fds, create_pidlock, detached, + fd_by_path, get_fdmax, ignore_errno, initgroups, isatty, maybe_drop_privileges, + parse_gid, parse_uid, set_mp_process_title, set_pdeathsig, set_process_title, setgid, + setgroups, setuid, signals) from celery.utils.text import WhateverIO from t.unit import conftest diff --git a/t/unit/utils/test_serialization.py b/t/unit/utils/test_serialization.py index bf83a0d68b5..1a4ca3b9d3a 100644 --- a/t/unit/utils/test_serialization.py +++ b/t/unit/utils/test_serialization.py @@ -8,11 +8,8 @@ import pytz from kombu import Queue -from celery.utils.serialization import (STRTOBOOL_DEFAULT_TABLE, - UnpickleableExceptionWrapper, - ensure_serializable, - get_pickleable_etype, jsonify, - strtobool) +from celery.utils.serialization import (STRTOBOOL_DEFAULT_TABLE, UnpickleableExceptionWrapper, ensure_serializable, + get_pickleable_etype, jsonify, strtobool) class test_AAPickle: diff --git a/t/unit/utils/test_text.py b/t/unit/utils/test_text.py index 659cc0b8007..1cfd8e162ca 100644 --- a/t/unit/utils/test_text.py +++ b/t/unit/utils/test_text.py @@ -1,7 +1,6 @@ import pytest -from celery.utils.text import (abbr, abbrtask, ensure_newlines, indent, - pretty, truncate) +from celery.utils.text import abbr, abbrtask, ensure_newlines, indent, pretty, truncate RANDTEXT = """\ The quick brown diff --git a/t/unit/utils/test_threads.py b/t/unit/utils/test_threads.py index 132f3504bc4..f31083be5f6 100644 --- a/t/unit/utils/test_threads.py +++ b/t/unit/utils/test_threads.py @@ -2,8 +2,7 @@ import pytest -from celery.utils.threads import (Local, LocalManager, _FastLocalStack, - _LocalStack, bgThread) +from celery.utils.threads import Local, LocalManager, _FastLocalStack, _LocalStack, bgThread from t.unit import conftest diff --git a/t/unit/utils/test_time.py b/t/unit/utils/test_time.py index 99d75f6c4fc..98758c4d471 100644 --- a/t/unit/utils/test_time.py +++ b/t/unit/utils/test_time.py @@ -6,12 +6,9 @@ from pytz import AmbiguousTimeError from celery.utils.iso8601 import parse_iso8601 -from celery.utils.time import (LocalTimezone, delta_resolution, ffwd, - get_exponential_backoff_interval, - humanize_seconds, localize, make_aware, - maybe_iso8601, maybe_make_aware, - maybe_timedelta, rate, remaining, timezone, - utcoffset) +from celery.utils.time import (LocalTimezone, delta_resolution, ffwd, get_exponential_backoff_interval, + humanize_seconds, localize, make_aware, maybe_iso8601, maybe_make_aware, + maybe_timedelta, rate, remaining, timezone, utcoffset) class test_LocalTimezone: diff --git a/t/unit/worker/test_consumer.py b/t/unit/worker/test_consumer.py index 0e7ce90818f..d63a9269b55 100644 --- a/t/unit/worker/test_consumer.py +++ b/t/unit/worker/test_consumer.py @@ -9,8 +9,7 @@ from celery.contrib.testing.mocks import ContextMock from celery.utils.collections import LimitedSet from celery.worker.consumer.agent import Agent -from celery.worker.consumer.consumer import (CANCEL_TASKS_BY_DEFAULT, CLOSE, - TERMINATE, Consumer) +from celery.worker.consumer.consumer import 
CANCEL_TASKS_BY_DEFAULT, CLOSE, TERMINATE, Consumer from celery.worker.consumer.gossip import Gossip from celery.worker.consumer.heart import Heart from celery.worker.consumer.mingle import Mingle diff --git a/t/unit/worker/test_loops.py b/t/unit/worker/test_loops.py index 2b2db226554..8a1fe63e4a0 100644 --- a/t/unit/worker/test_loops.py +++ b/t/unit/worker/test_loops.py @@ -8,8 +8,7 @@ from kombu.exceptions import DecodeError from celery.bootsteps import CLOSE, RUN -from celery.exceptions import (InvalidTaskError, WorkerLostError, - WorkerShutdown, WorkerTerminate) +from celery.exceptions import InvalidTaskError, WorkerLostError, WorkerShutdown, WorkerTerminate from celery.platforms import EX_FAILURE, EX_OK from celery.worker import state from celery.worker.consumer import Consumer diff --git a/t/unit/worker/test_request.py b/t/unit/worker/test_request.py index 2c49f777103..a34f70dc80d 100644 --- a/t/unit/worker/test_request.py +++ b/t/unit/worker/test_request.py @@ -12,13 +12,10 @@ from kombu.utils.uuid import uuid from celery import states -from celery.app.trace import (TraceInfo, build_tracer, fast_trace_task, - mro_lookup, reset_worker_optimizations, - setup_worker_optimizations, trace_task, - trace_task_ret) +from celery.app.trace import (TraceInfo, build_tracer, fast_trace_task, mro_lookup, reset_worker_optimizations, + setup_worker_optimizations, trace_task, trace_task_ret) from celery.backends.base import BaseDictBackend -from celery.exceptions import (Ignore, InvalidTaskError, Reject, Retry, - TaskRevokedError, Terminated, WorkerLostError) +from celery.exceptions import Ignore, InvalidTaskError, Reject, Retry, TaskRevokedError, Terminated, WorkerLostError from celery.signals import task_failure, task_retry, task_revoked from celery.worker import request as module from celery.worker import strategy diff --git a/t/unit/worker/test_worker.py b/t/unit/worker/test_worker.py index c6733e97d1c..93589fdbf5a 100644 --- a/t/unit/worker/test_worker.py +++ b/t/unit/worker/test_worker.py @@ -21,8 +21,7 @@ import t.skip from celery.bootsteps import CLOSE, RUN, TERMINATE, StartStopStep from celery.concurrency.base import BasePool -from celery.exceptions import (ImproperlyConfigured, InvalidTaskError, - TaskRevokedError, WorkerShutdown, +from celery.exceptions import (ImproperlyConfigured, InvalidTaskError, TaskRevokedError, WorkerShutdown, WorkerTerminate) from celery.platforms import EX_FAILURE from celery.utils.nodenames import worker_direct From 58873e3f850739a23b43819f95a90bc536bbc8aa Mon Sep 17 00:00:00 2001 From: Ruben Nielsen Date: Thu, 27 Jan 2022 08:52:00 +0100 Subject: [PATCH 0013/1051] Fix typo in CELERY_SERIALIZER docs CELERY_SERIALIZER setting did nothing, but CELERY_TASK_SERIALIZER behaved as expected --- docs/userguide/configuration.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst index 52797df39fe..95866b9bc74 100644 --- a/docs/userguide/configuration.rst +++ b/docs/userguide/configuration.rst @@ -145,7 +145,7 @@ have been moved into a new ``task_`` prefix. 
``CELERY_QUEUES`` :setting:`task_queues` ``CELERY_ROUTES`` :setting:`task_routes` ``CELERY_SEND_SENT_EVENT`` :setting:`task_send_sent_event` -``CELERY_SERIALIZER`` :setting:`task_serializer` +``CELERY_TASK_SERIALIZER`` :setting:`task_serializer` ``CELERYD_SOFT_TIME_LIMIT`` :setting:`task_soft_time_limit` ``CELERY_TASK_TRACK_STARTED`` :setting:`task_track_started` ``CELERY_TASK_REJECT_ON_WORKER_LOST`` :setting:`task_reject_on_worker_lost` From f01ba4848b7052ab7e64ea77b8314032912ebff4 Mon Sep 17 00:00:00 2001 From: Dmytro Litvinov Date: Thu, 27 Jan 2022 13:39:28 +0200 Subject: [PATCH 0014/1051] Update link to exponential backoff and jitter --- celery/utils/time.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/celery/utils/time.py b/celery/utils/time.py index c898b90e93a..6b5813ebdf8 100644 --- a/celery/utils/time.py +++ b/celery/utils/time.py @@ -381,7 +381,7 @@ def get_exponential_backoff_interval( # Will be zero if factor equals 0 countdown = min(maximum, factor * (2 ** retries)) # Full jitter according to - # https://www.awsarchitectureblog.com/2015/03/backoff.html + # https://aws.amazon.com/blogs/architecture/exponential-backoff-and-jitter/ if full_jitter: countdown = random.randrange(countdown + 1) # Adjust according to maximum wait time and account for negative values. From 55401c947476a59efc0f34776cf907f6a20aeae7 Mon Sep 17 00:00:00 2001 From: uuip Date: Wed, 9 Feb 2022 21:17:17 +0800 Subject: [PATCH 0015/1051] Fix subscribed_to maybe empty (#7232) * Fix subscribed_to maybe empty * add comment about callback * add _reconnect_pubsub test cases * update comment from thedrow Co-authored-by: Omer Katz Co-authored-by: Omer Katz --- celery/backends/redis.py | 17 ++++++++++++----- t/unit/backends/test_redis.py | 29 +++++++++++++++++++++++++++++ 2 files changed, 41 insertions(+), 5 deletions(-) diff --git a/celery/backends/redis.py b/celery/backends/redis.py index a193181ba8f..056f2c0eff9 100644 --- a/celery/backends/redis.py +++ b/celery/backends/redis.py @@ -102,17 +102,24 @@ def _reconnect_pubsub(self): self.backend.client.connection_pool.reset() # task state might have changed when the connection was down so we # retrieve meta for all subscribed tasks before going into pubsub mode - metas = self.backend.client.mget(self.subscribed_to) - metas = [meta for meta in metas if meta] - for meta in metas: - self.on_state_change(self._decode_result(meta), None) + if self.subscribed_to: + metas = self.backend.client.mget(self.subscribed_to) + metas = [meta for meta in metas if meta] + for meta in metas: + self.on_state_change(self._decode_result(meta), None) self._pubsub = self.backend.client.pubsub( ignore_subscribe_messages=True, ) + # subscribed_to maybe empty after on_state_change if self.subscribed_to: self._pubsub.subscribe(*self.subscribed_to) else: - self._pubsub.ping() + self._pubsub.connection = self._pubsub.connection_pool.get_connection( + 'pubsub', self._pubsub.shard_hint + ) + # even if there is nothing to subscribe, we should not lose the callback after connecting. + # The on_connect callback will re-subscribe to any channels we previously subscribed to. 
+ self._pubsub.connection.register_connect_callback(self._pubsub.on_connect) @contextmanager def reconnect_on_error(self): diff --git a/t/unit/backends/test_redis.py b/t/unit/backends/test_redis.py index 7a09812c526..1643c165956 100644 --- a/t/unit/backends/test_redis.py +++ b/t/unit/backends/test_redis.py @@ -285,6 +285,35 @@ def test_drain_events_connection_error_no_patch(self): consumer.drain_events() consumer._pubsub.subscribe.assert_not_called() + def test__reconnect_pubsub_no_subscribed(self): + consumer = self.get_consumer() + consumer.start('initial') + consumer.subscribed_to = set() + consumer._reconnect_pubsub() + consumer.backend.client.mget.assert_not_called() + consumer._pubsub.subscribe.assert_not_called() + consumer._pubsub.connection.register_connect_callback.assert_called_once() + + def test__reconnect_pubsub_with_state_change(self): + meta = {'task_id': 'initial', 'status': states.SUCCESS} + consumer = self.get_consumer() + consumer.start('initial') + consumer.backend._set_with_state(b'celery-task-meta-initial', json.dumps(meta), states.SUCCESS) + consumer._reconnect_pubsub() + consumer.backend.client.mget.assert_called_once() + consumer._pubsub.subscribe.assert_not_called() + consumer._pubsub.connection.register_connect_callback.assert_called_once() + + def test__reconnect_pubsub_without_state_change(self): + meta = {'task_id': 'initial', 'status': states.STARTED} + consumer = self.get_consumer() + consumer.start('initial') + consumer.backend._set_with_state(b'celery-task-meta-initial', json.dumps(meta), states.SUCCESS) + consumer._reconnect_pubsub() + consumer.backend.client.mget.assert_called_once() + consumer._pubsub.subscribe.assert_called_once() + consumer._pubsub.connection.register_connect_callback.assert_not_called() + class basetest_RedisBackend: def get_backend(self): From cb1ed52988cf7ce500899aa0e3bb7758fbbcd3ad Mon Sep 17 00:00:00 2001 From: imdark Date: Sun, 13 Feb 2022 16:53:45 -0500 Subject: [PATCH 0016/1051] fixed slight typo --- celery/app/amqp.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/celery/app/amqp.py b/celery/app/amqp.py index 10747eed93b..777a1fc2c7c 100644 --- a/celery/app/amqp.py +++ b/celery/app/amqp.py @@ -597,7 +597,7 @@ def utc(self): @cached_property def _event_dispatcher(self): # We call Dispatcher.publish with a custom producer - # so don't need the diuspatcher to be enabled. + # so don't need the dispatcher to be enabled. return self.app.events.Dispatcher(enabled=False) def _handle_conf_update(self, *args, **kwargs): From c556648ee1762730c962e818ff40c9aa71fdf2d4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rub=C3=A9n=20G=C3=B3mez?= Date: Mon, 14 Feb 2022 16:44:17 +0100 Subject: [PATCH 0017/1051] Fix: Celery beat sleeps 300 seconds sometimes even when it should run a task within a few seconds (e.g. 13 seconds) #7290 (#7291) * Fix error that allow sleep the beat process for the max interval value when must be execute the following task in 0.01 seconds * Add new unit test to check the case in left 0.01 seconds to execute the next test in the tick method * Add test for the new function is_numeric_value * Fix isort test_funcional.py --- celery/beat.py | 5 ++++- celery/utils/functional.py | 4 ++++ t/unit/app/test_beat.py | 7 +++++++ t/unit/utils/test_functional.py | 21 +++++++++++++++++++-- 4 files changed, 34 insertions(+), 3 deletions(-) diff --git a/celery/beat.py b/celery/beat.py index 74537e3469d..0cfa21559df 100644 --- a/celery/beat.py +++ b/celery/beat.py @@ -22,6 +22,7 @@ from . 
import __version__, platforms, signals from .exceptions import reraise from .schedules import crontab, maybe_schedule +from .utils.functional import is_numeric_value from .utils.imports import load_extension_class_names, symbol_by_name from .utils.log import get_logger, iter_open_logger_fds from .utils.time import humanize_seconds, maybe_make_aware @@ -361,7 +362,9 @@ def tick(self, event_t=event_t, min=min, heappop=heapq.heappop, else: heappush(H, verify) return min(verify[0], max_interval) - return min(adjust(next_time_to_run) or max_interval, max_interval) + adjusted_next_time_to_run = adjust(next_time_to_run) + return min(adjusted_next_time_to_run if is_numeric_value(adjusted_next_time_to_run) else max_interval, + max_interval) def schedules_equal(self, old_schedules, new_schedules): if old_schedules is new_schedules is None: diff --git a/celery/utils/functional.py b/celery/utils/functional.py index da866b75dc2..bcc15a3c788 100644 --- a/celery/utils/functional.py +++ b/celery/utils/functional.py @@ -389,3 +389,7 @@ def seq_concat_seq(a, b): if not isinstance(b, prefer): b = prefer(b) return a + b + + +def is_numeric_value(value): + return isinstance(value, (int, float)) and not isinstance(value, bool) diff --git a/t/unit/app/test_beat.py b/t/unit/app/test_beat.py index 641c7b7a0b2..445aa28ed86 100644 --- a/t/unit/app/test_beat.py +++ b/t/unit/app/test_beat.py @@ -164,6 +164,7 @@ def is_due(self, last_run_at): always_due = mocked_schedule(True, 1) always_pending = mocked_schedule(False, 1) +always_pending_left_10_milliseconds = mocked_schedule(False, 0.01) class test_Scheduler: @@ -354,6 +355,12 @@ def test_pending_tick(self): schedule=always_pending) assert scheduler.tick() == 1 - 0.010 + def test_pending_left_10_milliseconds_tick(self): + scheduler = mScheduler(app=self.app) + scheduler.add(name='test_pending_left_10_milliseconds_tick', + schedule=always_pending_left_10_milliseconds) + assert scheduler.tick() == 0.010 - 0.010 + def test_honors_max_interval(self): scheduler = mScheduler(app=self.app) maxi = scheduler.max_interval diff --git a/t/unit/utils/test_functional.py b/t/unit/utils/test_functional.py index b30cd6a6b14..57055a14a6e 100644 --- a/t/unit/utils/test_functional.py +++ b/t/unit/utils/test_functional.py @@ -5,8 +5,8 @@ from kombu.utils.functional import lazy from celery.utils.functional import (DummyContext, first, firstmethod, fun_accepts_kwargs, fun_takes_argument, - head_from_fun, lookahead, maybe_list, mlazy, padlist, regen, seq_concat_item, - seq_concat_seq) + head_from_fun, is_numeric_value, lookahead, maybe_list, mlazy, padlist, regen, + seq_concat_item, seq_concat_seq) def test_DummyContext(): @@ -471,3 +471,20 @@ def test_accepts(self, fun): ]) def test_rejects(self, fun): assert not fun_accepts_kwargs(fun) + + +@pytest.mark.parametrize('value,expected', [ + (5, True), + (5.0, True), + (0, True), + (0.0, True), + (True, False), + ('value', False), + ('5', False), + ('5.0', False), + (None, False), +]) +def test_is_numeric_value(value, expected): + res = is_numeric_value(value) + assert type(res) is type(expected) + assert res == expected From a1d706c900973781173702c0fafd09d935b5d0e7 Mon Sep 17 00:00:00 2001 From: Michel Hua Date: Mon, 14 Feb 2022 09:28:18 +0100 Subject: [PATCH 0018/1051] Update canvas.rst --- docs/userguide/canvas.rst | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/docs/userguide/canvas.rst b/docs/userguide/canvas.rst index 45912a6d2c9..81d9922e518 100644 --- a/docs/userguide/canvas.rst +++ 
b/docs/userguide/canvas.rst @@ -385,13 +385,13 @@ Here's some examples: .. code-block:: pycon >>> from celery import chord - >>> res = chord((add.s(i, i) for i in range(10)), xsum.s())() + >>> res = chord((add.s(i, i) for i in range(10)), tsum.s())() >>> res.get() 90 The above example creates 10 task that all start in parallel, and when all of them are complete the return values are combined - into a list and sent to the ``xsum`` task. + into a list and sent to the ``tsum`` task. The body of a chord can also be immutable, so that the return value of the group isn't passed on to the callback: @@ -434,7 +434,7 @@ Here's some examples: .. code-block:: pycon - >>> c3 = (group(add.s(i, i) for i in range(10)) | xsum.s()) + >>> c3 = (group(add.s(i, i) for i in range(10)) | tsum.s()) >>> res = c3() >>> res.get() 90 @@ -928,7 +928,7 @@ an errback to the chord callback: .. code-block:: pycon >>> c = (group(add.s(i, i) for i in range(10)) | - ... xsum.s().on_error(on_chord_error.s())).delay() + ... tsum.s().on_error(on_chord_error.s())).delay() Chords may have callback and errback signatures linked to them, which addresses some of the issues with linking signatures to groups. @@ -1025,7 +1025,7 @@ For example using ``map``: >>> from proj.tasks import add - >>> ~xsum.map([range(10), range(100)]) + >>> ~tsum.map([range(10), range(100)]) [45, 4950] is the same as having a task doing: @@ -1034,7 +1034,7 @@ is the same as having a task doing: @app.task def temp(): - return [xsum(range(10)), xsum(range(100))] + return [tsum(range(10)), tsum(range(100))] and using ``starmap``: From 239ed3c6aa1090a4a5931221c6a9fcbd9b1722a2 Mon Sep 17 00:00:00 2001 From: goldstar611 Date: Tue, 15 Feb 2022 10:53:38 -0600 Subject: [PATCH 0019/1051] Make instances of `parse_page` consistent PR https://github.com/celery/celery/pull/5578 introduced a documentation bug where the call to `parse_page` did not match the signature `def parse_page(url, page)` Later, in chaining tasks together we see that `parse_page` has been defined to have a single parameter. Then `parse_page` is re-defined again with 2 parameters, `url` and `page`. --- docs/userguide/tasks.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/userguide/tasks.rst b/docs/userguide/tasks.rst index cb1dd310630..2a86a5fe3b5 100644 --- a/docs/userguide/tasks.rst +++ b/docs/userguide/tasks.rst @@ -1695,7 +1695,7 @@ Make your design asynchronous instead, for example by using *callbacks*. @app.task def update_page_info(url): page = fetch_page.delay(url).get() - info = parse_page.delay(url, page).get() + info = parse_page.delay(page).get() store_page_info.delay(url, info) @app.task @@ -1748,7 +1748,7 @@ enabling subtasks to run synchronously is not recommended! @app.task def update_page_info(url): page = fetch_page.delay(url).get(disable_sync_subtasks=False) - info = parse_page.delay(url, page).get(disable_sync_subtasks=False) + info = parse_page.delay(page).get(disable_sync_subtasks=False) store_page_info.delay(url, info) @app.task @@ -1756,7 +1756,7 @@ enabling subtasks to run synchronously is not recommended! 
return myhttplib.get(url) @app.task - def parse_page(url, page): + def parse_page(page): return myparser.parse_document(page) @app.task From 744ef43c4d83e190d85c034a8e5cb4ca7b7a22e0 Mon Sep 17 00:00:00 2001 From: Tizian Seehaus <38123657+tibotix@users.noreply.github.com> Date: Tue, 22 Feb 2022 14:36:55 +0100 Subject: [PATCH 0020/1051] Add `security_key_password` option (#7292) * Expose password argument on PrivateKey * Added base.setup_security `security_key_password` keyword argument * Added Option for `security_key_password` * Ensure Bytes on PrivateKey password argument * Added Documentation for `security_key_password` usage * Added tests for `security_key_password` * Updated CONTRIBUTORS.txt * [fix] Updated `versionadded` to 5.3.0 --- CONTRIBUTORS.txt | 1 + celery/app/base.py | 6 ++-- celery/app/defaults.py | 1 + celery/security/__init__.py | 5 ++-- celery/security/key.py | 2 +- celery/security/serialization.py | 4 +-- docs/userguide/configuration.rst | 13 +++++++++ docs/userguide/security.rst | 2 ++ t/unit/app/test_app.py | 4 +-- t/unit/security/__init__.py | 40 +++++++++++++++++++++++++++ t/unit/security/test_key.py | 8 +++++- t/unit/security/test_security.py | 25 +++++++++++++++-- t/unit/security/test_serialization.py | 2 +- 13 files changed, 99 insertions(+), 14 deletions(-) diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 1c497349f54..fc6b8d4b874 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -286,3 +286,4 @@ Patrick Zhang, 2017/08/19 Konstantin Kochin, 2021/07/11 kronion, 2021/08/26 Gabor Boros, 2021/11/09 +Tizian Seehaus, 2022/02/09 \ No newline at end of file diff --git a/celery/app/base.py b/celery/app/base.py index bd222651b4d..cf2a3ac3671 100644 --- a/celery/app/base.py +++ b/celery/app/base.py @@ -604,7 +604,7 @@ def config_from_cmdline(self, argv, namespace='celery'): self.loader.cmdline_config_parser(argv, namespace) ) - def setup_security(self, allowed_serializers=None, key=None, cert=None, + def setup_security(self, allowed_serializers=None, key=None, key_password=None, cert=None, store=None, digest=DEFAULT_SECURITY_DIGEST, serializer='json'): """Setup the message-signing serializer. @@ -620,6 +620,8 @@ def setup_security(self, allowed_serializers=None, key=None, cert=None, content_types that should be exempt from being disabled. key (str): Name of private key file to use. Defaults to the :setting:`security_key` setting. + key_password (bytes): Password to decrypt the private key. + Defaults to the :setting:`security_key_password` setting. cert (str): Name of certificate file to use. Defaults to the :setting:`security_certificate` setting. store (str): Directory containing certificates. @@ -631,7 +633,7 @@ def setup_security(self, allowed_serializers=None, key=None, cert=None, the serializers supported. Default is ``json``. 
""" from celery.security import setup_security - return setup_security(allowed_serializers, key, cert, + return setup_security(allowed_serializers, key, key_password, cert, store, digest, serializer, app=self) def autodiscover_tasks(self, packages=None, diff --git a/celery/app/defaults.py b/celery/app/defaults.py index 596c750f2b5..1015c27892a 100644 --- a/celery/app/defaults.py +++ b/celery/app/defaults.py @@ -226,6 +226,7 @@ def __repr__(self): certificate=Option(type='string'), cert_store=Option(type='string'), key=Option(type='string'), + key_password=Option(type='bytes'), digest=Option(DEFAULT_SECURITY_DIGEST, type='string'), ), database=Namespace( diff --git a/celery/security/__init__.py b/celery/security/__init__.py index 8b7f74cc407..c801d98b1df 100644 --- a/celery/security/__init__.py +++ b/celery/security/__init__.py @@ -41,7 +41,7 @@ raise ImproperlyConfigured(CRYPTOGRAPHY_NOT_INSTALLED) -def setup_security(allowed_serializers=None, key=None, cert=None, store=None, +def setup_security(allowed_serializers=None, key=None, key_password=None, cert=None, store=None, digest=None, serializer='json', app=None): """See :meth:`@Celery.setup_security`.""" if app is None: @@ -56,6 +56,7 @@ def setup_security(allowed_serializers=None, key=None, cert=None, store=None, raise ImproperlyConfigured(SETTING_MISSING) key = key or conf.security_key + key_password = key_password or conf.security_key_password cert = cert or conf.security_certificate store = store or conf.security_cert_store digest = digest or conf.security_digest @@ -65,7 +66,7 @@ def setup_security(allowed_serializers=None, key=None, cert=None, store=None, with open(key) as kf: with open(cert) as cf: - register_auth(kf.read(), cf.read(), store, digest, serializer) + register_auth(kf.read(), key_password, cf.read(), store, digest, serializer) registry._set_default_serializer('auth') diff --git a/celery/security/key.py b/celery/security/key.py index 939d501fa80..2c4882b6f80 100644 --- a/celery/security/key.py +++ b/celery/security/key.py @@ -18,7 +18,7 @@ def __init__(self, key, password=None): ): self._key = serialization.load_pem_private_key( ensure_bytes(key), - password=password, + password=ensure_bytes(password), backend=default_backend()) def sign(self, data, digest): diff --git a/celery/security/serialization.py b/celery/security/serialization.py index 7284feb1886..c58ef906542 100644 --- a/celery/security/serialization.py +++ b/celery/security/serialization.py @@ -88,11 +88,11 @@ def _unpack(self, payload, sep=str_to_bytes('\x00\x01')): } -def register_auth(key=None, cert=None, store=None, +def register_auth(key=None, key_password=None, cert=None, store=None, digest=DEFAULT_SECURITY_DIGEST, serializer='json'): """Register security serializer.""" - s = SecureSerializer(key and PrivateKey(key), + s = SecureSerializer(key and PrivateKey(key, password=key_password), cert and Certificate(cert), store and FSCertStore(store), digest, serializer=serializer) diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst index 95866b9bc74..74f52895920 100644 --- a/docs/userguide/configuration.rst +++ b/docs/userguide/configuration.rst @@ -126,6 +126,7 @@ have been moved into a new ``task_`` prefix. 
``CELERY_SECURITY_CERTIFICATE`` :setting:`security_certificate` ``CELERY_SECURITY_CERT_STORE`` :setting:`security_cert_store` ``CELERY_SECURITY_KEY`` :setting:`security_key` +``CELERY_SECURITY_KEY_PASSWORD`` :setting:`security_key_password` ``CELERY_ACKS_LATE`` :setting:`task_acks_late` ``CELERY_ACKS_ON_FAILURE_OR_TIMEOUT`` :setting:`task_acks_on_failure_or_timeout` ``CELERY_ALWAYS_EAGER`` :setting:`task_always_eager` @@ -3141,6 +3142,18 @@ Default: :const:`None`. The relative or absolute path to a file containing the private key used to sign messages when :ref:`message-signing` is used. +.. setting:: security_key_password + +``security_key_password`` +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Default: :const:`None`. + +.. versionadded:: 5.3.0 + +The password used to decrypt the private key when :ref:`message-signing` +is used. + .. setting:: security_certificate ``security_certificate`` diff --git a/docs/userguide/security.rst b/docs/userguide/security.rst index ba4d04eb086..48d7d991afb 100644 --- a/docs/userguide/security.rst +++ b/docs/userguide/security.rst @@ -162,6 +162,8 @@ the :setting:`security_key`, :setting:`security_certificate`, and :setting:`security_cert_store` settings respectively. You can tweak the signing algorithm with :setting:`security_digest`. +If using an encrypted private key, the password can be configured with +:setting:`security_key_password`. With these configured it's also necessary to call the :func:`celery.setup_security` function. Note that this will also diff --git a/t/unit/app/test_app.py b/t/unit/app/test_app.py index ed61b0f8356..cb68b5b69ef 100644 --- a/t/unit/app/test_app.py +++ b/t/unit/app/test_app.py @@ -113,9 +113,9 @@ def test_set_default(self, set_default_app): @patch('celery.security.setup_security') def test_setup_security(self, setup_security): self.app.setup_security( - {'json'}, 'key', 'cert', 'store', 'digest', 'serializer') + {'json'}, 'key', None, 'cert', 'store', 'digest', 'serializer') setup_security.assert_called_with( - {'json'}, 'key', 'cert', 'store', 'digest', 'serializer', + {'json'}, 'key', None, 'cert', 'store', 'digest', 'serializer', app=self.app) def test_task_autofinalize_disabled(self): diff --git a/t/unit/security/__init__.py b/t/unit/security/__init__.py index 6e0124a8fcb..feec8ba4d97 100644 --- a/t/unit/security/__init__.py +++ b/t/unit/security/__init__.py @@ -4,6 +4,8 @@ Generated with `extra/security/get-cert.sh` """ +KEYPASSWORD = b"samplepassword" + KEY1 = """-----BEGIN RSA PRIVATE KEY----- MIICXQIBAAKBgQC9Twh0V5q/R1Q8N+Y+CNM4lj9AXeZL0gYowoK1ht2ZLCDU9vN5 dhV0x3sqaXLjQNeCGd6b2vTbFGdF2E45//IWz6/BdPFWaPm0rtYbcxZHqXDZScRp @@ -20,6 +22,25 @@ xqkQQn+UgBtOemRXpFCuKaoXonA3nLeB54SWcC6YUOcR -----END RSA PRIVATE KEY-----""" +ENCKEY1 = """-----BEGIN ENCRYPTED PRIVATE KEY----- +MIIC3TBXBgkqhkiG9w0BBQ0wSjApBgkqhkiG9w0BBQwwHAQIfSuXbPVZsP8CAggA +MAwGCCqGSIb3DQIJBQAwHQYJYIZIAWUDBAEqBBBP/mVP1cCpfTpoJZuSKRrnBIIC +gMKyrj4mzdr0xASR4120M3mh56+1dUDvLJl0DwOXD5NGCQfvSgDP0mGSrmIcM6Rh +O9oePFj81IjHoGQNVgFNhd8Lc1R7xe51Vk8M3VfCOnPwWzuBzGe8vlgyfzKRVhgo +vb633pZR721xcPCK08aEXcsLwXrMGpp/EtHtpJD7MwqVFOhUjcUhKWNa7icFkVR1 +fzL6CC24CjsJWFz8esdJUNwGJv2vcYcoYYcIkVX5s1riSemhUmPCVTvT1Rvl2yTE +T2oHWCCMD5lhd+gcsSlcK/PlUY9J5GMJd61w+uD2A5qVOzOHDIRIwjRUbGpS2feL +1rWUjBbF8YF8mUp1cYdJSjKE9ro2qZbbFRLB+il3FLimjb1yFEAEItQzR123loJ6 +cTrQEg9WZmLTwrxsOx54bYR6CGBU1fpVkpeR95xYtKyhfK1RD03Aj6ffcDiaJH73 +lodf+ObBORYMYBi6E0AJvv2HNJHaZVzmj+ynzeTV6rfUyP075YZjS5XoRYKCOQz6 +HcssJUeGT+voPTbf67AO/clJDgOBn82fa8eIMGibgQARtOcEuhac9Gl4R2whfbdp 
+DkODqVKiqHCgO5qxGxmE/cEZpa7+j6Q8YTVWlvGdDtBQK4+NB1hHgnsPsG9RLjWy +Z7Ch/UjkmMxNGnvwWb9Xaq56ZqOmQGmoet+v9OLXAKZwZMRaURuJffxbd+YrexnE +LF9xV1b+w1taLrGCNn8yLDJY9G/T9zsH6eGjZslT9MPLlxq4PaL7WysKGhOt2+Vw +beQ4tDVmjlJjODOyaygt0wwzEght02lZmGhL88S35hfWpyskcWzGfbYkGqJVxY5E +i8wow1MqvPUQdKWNPgPGd04= +-----END ENCRYPTED PRIVATE KEY-----""" + KEY2 = """-----BEGIN RSA PRIVATE KEY----- MIICXQIBAAKBgQDH22L8b9AmST9ABDmQTQ2DWMdDmK5YXZt4AIY81IcsTQ/ccM0C fwXEP9tdkYwtcxMCWdASwY5pfMy9vFp0hyrRQMSNfuoxAgONuNWPyQoIvY3ZXRe6 @@ -36,6 +57,25 @@ Fxeq/HOp9JYw4gRu6Ycvqu57KHwpHhR0FCXRBxuYcJ5V -----END RSA PRIVATE KEY-----""" +ENCKEY2 = """-----BEGIN ENCRYPTED PRIVATE KEY----- +MIIC3TBXBgkqhkiG9w0BBQ0wSjApBgkqhkiG9w0BBQwwHAQIbWgdUR8UE/cCAggA +MAwGCCqGSIb3DQIJBQAwHQYJYIZIAWUDBAEqBBA50e1NvEUQXLkA44V4wVeOBIIC +gBt+cRTT+Jqrayj1hSrKgD20mNKz0qo6/JsXwTcHQJLQ91KFWDkAfCYOazzzIlIx +/rsJqz6IY1LckwL2Rtls3hp4+tNPD4AregtadMKgJj5lOyX1RYGdbkjTkhymMKKo +3f5sayoIXkOovT9qADKGjVaHL2tmc5hYJhtNHGKiy+CqraN+h8fOsZsSJDLoWCZV +iSC2rXBsWvqq0ItBEeJhvoCqzOg+ZL7SNrHez6/g8de8xob9eLXZMw6CWiZJ6NJa +mcBMIw+ep6nfZ53rQd/5N5T5B4b0EYK+DM8eypqljbc81IvKvPc3HsoU/TFC+3XW +2qoaQVbsZu8kOyY7xqR/MO3H2klRAVIEBgzqU/ZGl0abLyn7PcV4et8ld8zfwR1c +0Whpq+9kN5O1RWIKU/CU4Xx2WwBLklnqV9U8rHF6FGcSi62rCzkv6GhHpoO6wi3w +vP08ACHMa4of/WJhqKmBic9Q3IMf77irJRS7cqkwkjr7mIzazQvouQCHma5y5moQ +x1XfkX3U7qZwdCOtDcfFVLfeWnY7iEbeoMKJu/siJAkbWI45jRLANQMn6Y4nu3oS +S+XeYxmDBV0JJEBkaTuck9rb0X9TU+Ms6pGvTXTt4r2jz+GUVuFDHCp3MlRD64tb +d1VBresyllIFF39adeKyVeW+pp3q1fd2N7pNKo+oDiIg+rDwNtvA9sX10j6gh8Wp +LZZYJpiMpmof/eMMm6LTgjoJ+PZHRGtR1B8VF5RtuNioDWvpQAvnJS5cG1IjD7Sq +Q0EqU7r50YZJbDqA67dpHeC4iDxYoANbX8BP5E9fD1yEQGkEXmsogj5SokjqR2ef +iXQ8ER5I8IKAr2KjDXTJyZg= +-----END ENCRYPTED PRIVATE KEY-----""" + CERT1 = """-----BEGIN CERTIFICATE----- MIICVzCCAcACCQC72PP7b7H9BTANBgkqhkiG9w0BAQUFADBwMQswCQYDVQQGEwJV UzELMAkGA1UECBMCQ0ExCzAJBgNVBAcTAlNGMQ8wDQYDVQQKEwZDZWxlcnkxDzAN diff --git a/t/unit/security/test_key.py b/t/unit/security/test_key.py index 53c06a0409a..ffa52925bde 100644 --- a/t/unit/security/test_key.py +++ b/t/unit/security/test_key.py @@ -5,7 +5,7 @@ from celery.security.key import PrivateKey from celery.security.utils import get_digest_algorithm -from . import CERT1, KEY1, KEY2 +from . import CERT1, ENCKEY1, ENCKEY2, KEY1, KEY2, KEYPASSWORD from .case import SecurityCase @@ -14,6 +14,8 @@ class test_PrivateKey(SecurityCase): def test_valid_private_key(self): PrivateKey(KEY1) PrivateKey(KEY2) + PrivateKey(ENCKEY1, KEYPASSWORD) + PrivateKey(ENCKEY2, KEYPASSWORD) def test_invalid_private_key(self): with pytest.raises((SecurityError, TypeError)): @@ -24,6 +26,10 @@ def test_invalid_private_key(self): PrivateKey('foo') with pytest.raises(SecurityError): PrivateKey(KEY1[:20] + KEY1[21:]) + with pytest.raises(SecurityError): + PrivateKey(ENCKEY1, KEYPASSWORD+b"wrong") + with pytest.raises(SecurityError): + PrivateKey(ENCKEY2, KEYPASSWORD+b"wrong") with pytest.raises(SecurityError): PrivateKey(CERT1) diff --git a/t/unit/security/test_security.py b/t/unit/security/test_security.py index 0b75ffc3619..0559919997e 100644 --- a/t/unit/security/test_security.py +++ b/t/unit/security/test_security.py @@ -27,7 +27,7 @@ from celery.security.utils import reraise_errors from t.unit import conftest -from . import CERT1, KEY1 +from . 
import CERT1, ENCKEY1, KEY1, KEYPASSWORD from .case import SecurityCase @@ -84,6 +84,25 @@ def test_setup_security(self): os.remove(tmp_key1.name) os.remove(tmp_cert1.name) + def test_setup_security_encrypted_key_file(self): + with tempfile.NamedTemporaryFile(mode='w', delete=False) as tmp_key1: + tmp_key1.write(ENCKEY1) + with tempfile.NamedTemporaryFile(mode='w', delete=False) as tmp_cert1: + tmp_cert1.write(CERT1) + + self.app.conf.update( + task_serializer='auth', + accept_content=['auth'], + security_key=tmp_key1.name, + security_key_password=KEYPASSWORD, + security_certificate=tmp_cert1.name, + security_cert_store='*.pem', + ) + self.app.setup_security() + + os.remove(tmp_key1.name) + os.remove(tmp_cert1.name) + def test_setup_security_disabled_serializers(self): disabled = registry._disabled_content_types assert len(disabled) == 0 @@ -123,9 +142,9 @@ def effect(*args): with conftest.open(side_effect=effect): with patch('celery.security.registry') as registry: store = Mock() - self.app.setup_security(['json'], key, cert, store) + self.app.setup_security(['json'], key, None, cert, store) dis.assert_called_with(['json']) - reg.assert_called_with('A', 'B', store, 'sha256', 'json') + reg.assert_called_with('A', None, 'B', store, 'sha256', 'json') registry._set_default_serializer.assert_called_with('auth') def test_security_conf(self): diff --git a/t/unit/security/test_serialization.py b/t/unit/security/test_serialization.py index 51925c487b7..6caf3857b81 100644 --- a/t/unit/security/test_serialization.py +++ b/t/unit/security/test_serialization.py @@ -55,7 +55,7 @@ def test_separate_ends(self): assert s2.deserialize(s1.serialize('foo')) == 'foo' def test_register_auth(self): - register_auth(KEY1, CERT1, '') + register_auth(KEY1, None, CERT1, '') assert 'application/data' in registry._decoders def test_lots_of_sign(self): From 9644ea2cca16b3b39bb848d1cb513fccbae9b071 Mon Sep 17 00:00:00 2001 From: Mark Byrne <31762852+mbyrnepr2@users.noreply.github.com> Date: Tue, 22 Feb 2022 15:06:29 +0100 Subject: [PATCH 0021/1051] Small documentation update: `task` -> `worker` (#7307) --- docs/userguide/configuration.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst index 74f52895920..a073d1fa10a 100644 --- a/docs/userguide/configuration.rst +++ b/docs/userguide/configuration.rst @@ -2314,7 +2314,7 @@ becomes:: w1@example.com.dq -Then you can route the task to the task by specifying the hostname +Then you can route the task to the worker by specifying the hostname as the routing key and the ``C.dq`` exchange:: task_routes = { From 8423c6743e9ccb7d8825442efff23779cdd411e5 Mon Sep 17 00:00:00 2001 From: Omer Katz Date: Tue, 22 Feb 2022 16:10:25 +0200 Subject: [PATCH 0022/1051] Limit elasticsearch support to below version 8.0. --- requirements/extras/elasticsearch.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/elasticsearch.txt b/requirements/extras/elasticsearch.txt index 174c3f8b3a7..79b70ac0eb7 100644 --- a/requirements/extras/elasticsearch.txt +++ b/requirements/extras/elasticsearch.txt @@ -1 +1 @@ -elasticsearch +elasticsearch<8.0 From c925f380218db3e0b32291c79d8a4915bf07e07e Mon Sep 17 00:00:00 2001 From: VojtechH <4881563+VojtechH@users.noreply.github.com> Date: Thu, 3 Feb 2022 23:09:21 +0100 Subject: [PATCH 0023/1051] Update example in docs Update the first example in the Connections so that it corresponds to the second example. 
--- docs/userguide/calling.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/userguide/calling.rst b/docs/userguide/calling.rst index 8bfe52feef4..06f0879c5cb 100644 --- a/docs/userguide/calling.rst +++ b/docs/userguide/calling.rst @@ -673,13 +673,13 @@ publisher: .. code-block:: python - + numbers = [(2, 2), (4, 4), (8, 8), (16, 16)] results = [] with add.app.pool.acquire(block=True) as connection: with add.get_publisher(connection) as publisher: try: - for args in numbers: - res = add.apply_async((2, 2), publisher=publisher) + for i, j in numbers: + res = add.apply_async((i, j), publisher=publisher) results.append(res) print([res.get() for res in results]) From 095cd7825182b7306a65480d0bfdf077428287fb Mon Sep 17 00:00:00 2001 From: Omer Katz Date: Tue, 22 Feb 2022 22:28:14 +0200 Subject: [PATCH 0024/1051] Add Github Sponsors to funding.yml. --- .github/FUNDING.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml index 5748c519985..55c5ce97aa7 100644 --- a/.github/FUNDING.yml +++ b/.github/FUNDING.yml @@ -1,6 +1,6 @@ # These are supported funding model platforms -github: +github: celery patreon: open_collective: celery ko_fi: # Replace with a single Ko-fi username From 185d3780e4a604524d9ca1fef08e38b7f69fb51f Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Tue, 1 Mar 2022 14:03:41 +0600 Subject: [PATCH 0025/1051] try new major release of pytest 7 (#7330) * try new major release of pytest 7 * fix typo --- requirements/test.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/test.txt b/requirements/test.txt index 90c84b1996e..406e5f6f50f 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,7 +1,7 @@ -pytest~=6.2 +pytest~=7.0.1 pytest-celery pytest-subtests -pytest-timeout~=1.4.2 +pytest-timeout~=2.1.0 boto3>=1.9.178 moto>=2.2.6 pre-commit From df7795f5875c11ff6a8e30c2d285e6a8946b4f18 Mon Sep 17 00:00:00 2001 From: Mads Jensen Date: Tue, 1 Mar 2022 11:09:21 +0100 Subject: [PATCH 0026/1051] Fix typo in feature request issue template. (#7331) --- .github/ISSUE_TEMPLATE/Feature-Request.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/ISSUE_TEMPLATE/Feature-Request.md b/.github/ISSUE_TEMPLATE/Feature-Request.md index d6ee2e8fa43..eda05bfbb4f 100644 --- a/.github/ISSUE_TEMPLATE/Feature-Request.md +++ b/.github/ISSUE_TEMPLATE/Feature-Request.md @@ -18,7 +18,7 @@ To check an item on the list replace [ ] with [x]. - [ ] I have checked the [pull requests list](https://github.com/celery/celery/pulls?utf8=%E2%9C%93&q=is%3Apr+label%3A%22PR+Type%3A+Feature%22+) for existing proposed implementations of this feature. - [ ] I have checked the [commit log](https://github.com/celery/celery/commits/master) - to find out if the if the same feature was already implemented in the + to find out if the same feature was already implemented in the master branch. - [ ] I have included all related issues and possible duplicate issues in this issue (If there are none, check this box anyway). From facf605897c7843b429538d13da51f424d7a906c Mon Sep 17 00:00:00 2001 From: Mads Jensen Date: Wed, 2 Mar 2022 09:25:33 +0100 Subject: [PATCH 0027/1051] Remove unneeded from __future__ imports in celery.contrib.abortable. 
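For readers unfamiliar with the module touched here: abortable tasks boil down to a cooperative ``is_aborted()`` check inside the task body, as the module's own docstring shows below. A minimal sketch (``app`` and ``process_chunk`` are hypothetical):

.. code-block:: python

    from celery.contrib.abortable import AbortableTask

    @app.task(bind=True, base=AbortableTask)
    def long_running_task(self):
        for chunk in range(100):
            if self.is_aborted():
                return  # stop cooperatively
            process_chunk(chunk)

    # Producer side: results of abortable tasks expose abort().
    # Note that abort() needs a result backend, since the aborted
    # status is communicated through it.
    result = long_running_task.delay()
    result.abort()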
--- celery/contrib/abortable.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/celery/contrib/abortable.py b/celery/contrib/abortable.py index 75b4d2546d5..8cb164d7bf0 100644 --- a/celery/contrib/abortable.py +++ b/celery/contrib/abortable.py @@ -27,8 +27,6 @@ .. code-block:: python - from __future__ import absolute_import - from celery.contrib.abortable import AbortableTask from celery.utils.log import get_task_logger @@ -56,8 +54,6 @@ def long_running_task(self): .. code-block:: python - from __future__ import absolute_import - import time from proj.tasks import MyLongRunningTask From 4de59d8a0962b709a926274ac3b66821ece4af44 Mon Sep 17 00:00:00 2001 From: Omer Katz Date: Thu, 3 Mar 2022 13:20:28 +0200 Subject: [PATCH 0028/1051] `broker_connection_retry` should no longer apply on startup (#7300) * `broker_connection_retry` should no longer apply on startup. * Add documentation. * Fix deprecation message. * Log a critical log entry before performing a warm worker shutdown. * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * Log a critical entry before performing an immediate worker termination. * Remove unnecessary critical logs * Test that ensure_connected throws a deprecation warning if deprecated_no_retry_on_startup. * Test WorkerShutdowns in consumer start method. * Test WorkerTerminate in consumer start method. * Test to assert that blueprint restarts when state is not in stop conditions. * Parametrize broker_connection_retry in blueprint restart test. * Removing this test as it was replaced with test_too_many_open_files_raises_error * Warn of deprecation when broker_connection_retry_on_startup is undefined. * Combine ensure_connected tests. * Test that connection is not retried when connect() raises an error and retry is disabled. 
* Happify lint Co-authored-by: Naomi Elstein Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- celery/app/defaults.py | 1 + celery/worker/consumer/consumer.py | 49 +++++++++++--- docs/userguide/configuration.rst | 23 ++++++- t/unit/worker/test_consumer.py | 104 +++++++++++++++++++++++++---- 4 files changed, 153 insertions(+), 24 deletions(-) diff --git a/celery/app/defaults.py b/celery/app/defaults.py index 1015c27892a..102302f66cc 100644 --- a/celery/app/defaults.py +++ b/celery/app/defaults.py @@ -87,6 +87,7 @@ def __repr__(self): transport_options=Option({}, type='dict'), connection_timeout=Option(4, type='float'), connection_retry=Option(True, type='bool'), + connection_retry_on_startup=Option(None, type='bool'), connection_max_retries=Option(100, type='int'), failover_strategy=Option(None, type='string'), heartbeat=Option(120, type='int'), diff --git a/celery/worker/consumer/consumer.py b/celery/worker/consumer/consumer.py index d59f64a88a8..f12753f741f 100644 --- a/celery/worker/consumer/consumer.py +++ b/celery/worker/consumer/consumer.py @@ -22,7 +22,8 @@ from celery import bootsteps, signals from celery.app.trace import build_tracer -from celery.exceptions import CPendingDeprecationWarning, InvalidTaskError, NotRegistered +from celery.exceptions import (CPendingDeprecationWarning, InvalidTaskError, NotRegistered, WorkerShutdown, + WorkerTerminate) from celery.utils.functional import noop from celery.utils.log import get_logger from celery.utils.nodenames import gethostname @@ -323,12 +324,21 @@ def start(self): try: blueprint.start(self) except self.connection_errors as exc: - # If we're not retrying connections, no need to catch - # connection errors - if not self.app.conf.broker_connection_retry: - raise + # If we're not retrying connections, we need to properly shutdown or terminate + # the Celery main process instead of abruptly aborting the process without any cleanup. + is_connection_loss_on_startup = self.restart_count == 0 + connection_retry_type = self._get_connection_retry_type(is_connection_loss_on_startup) + connection_retry = self.app.conf[connection_retry_type] + if not connection_retry: + crit( + f"Retrying to {'establish' if is_connection_loss_on_startup else 're-establish'} " + f"a connection to the message broker after a connection loss has " + f"been disabled (app.conf.{connection_retry_type}=False). Shutting down..." + ) + raise WorkerShutdown(1) from exc if isinstance(exc, OSError) and exc.errno == errno.EMFILE: - raise # Too many open files + crit("Too many open files. Aborting...") + raise WorkerTerminate(1) from exc maybe_shutdown() if blueprint.state not in STOP_CONDITIONS: if self.connection: @@ -338,6 +348,12 @@ def start(self): self.on_close() blueprint.restart(self) + def _get_connection_retry_type(self, is_connection_loss_on_startup): + return ('broker_connection_retry_on_startup' + if (is_connection_loss_on_startup + and self.app.conf.broker_connection_retry_on_startup is not None) + else 'broker_connection_retry') + def on_connection_error_before_connected(self, exc): error(CONNECTION_ERROR, self.conninfo.as_uri(), exc, 'Trying to reconnect...') @@ -442,10 +458,25 @@ def _error_handler(exc, interval, next_step=CONNECTION_RETRY_STEP): max_retries=self.app.conf.broker_connection_max_retries) error(CONNECTION_ERROR, conn.as_uri(), exc, next_step) - # remember that the connection is lazy, it won't establish + # Remember that the connection is lazy, it won't establish # until needed. 
- if not self.app.conf.broker_connection_retry: - # retry disabled, just call connect directly. + # If broker_connection_retry_on_startup is not set, revert to broker_connection_retry + # to determine whether connection retries are disabled. + + # TODO: Rely only on broker_connection_retry_on_startup to determine whether connection retries are disabled. + # We will make the switch in Celery 6.0. + + if self.app.conf.broker_connection_retry_on_startup is None: + warnings.warn( + CPendingDeprecationWarning( + f"The broker_connection_retry configuration setting will no longer determine\n" + f"whether broker connection retries are made during startup in Celery 6.0 and above.\n" + f"If you wish to retain the existing behavior for retrying connections on startup,\n" + f"you should set broker_connection_retry_on_startup to {self.app.conf.broker_connection_retry}.") + ) + + if not self.app.conf.broker_connection_retry and not self.app.conf.broker_connection_retry_on_startup: + # Retry disabled, just call connect directly. conn.connect() return conn diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst index a073d1fa10a..50dbf4d9394 100644 --- a/docs/userguide/configuration.rst +++ b/docs/userguide/configuration.rst @@ -2619,7 +2619,28 @@ gevent. Default: Enabled. -Automatically try to re-establish the connection to the AMQP broker if lost. +Automatically try to re-establish the connection to the AMQP broker if lost +after the initial connection is made. + +The time between retries is increased for each retry, and is +not exhausted before :setting:`broker_connection_max_retries` is +exceeded. + +.. warning:: + + The broker_connection_retry configuration setting will no longer determine + whether broker connection retries are made during startup in Celery 6.0 and above. + If you wish to refrain from retrying connections on startup, + you should set broker_connection_retry_on_startup to False instead. + +.. setting:: broker_connection_retry_on_startup + +``broker_connection_retry_on_startup`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Default: Enabled. + +Automatically try to establish the connection to the AMQP broker on Celery startup if it is unavailable. 
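For example, opting in explicitly (assuming an existing ``app`` instance) both enables this behavior and avoids the pending-deprecation warning this change emits when the setting is left unset:

.. code-block:: python

    app.conf.broker_connection_retry_on_startup = True  # retry until the broker is reachable
    app.conf.broker_connection_retry = True             # keep retrying after a later connection loss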
The time between retries is increased for each retry, and is not exhausted before :setting:`broker_connection_max_retries` is diff --git a/t/unit/worker/test_consumer.py b/t/unit/worker/test_consumer.py index d63a9269b55..86619c1113b 100644 --- a/t/unit/worker/test_consumer.py +++ b/t/unit/worker/test_consumer.py @@ -6,7 +6,9 @@ import pytest from billiard.exceptions import RestartFreqExceeded +from celery import bootsteps from celery.contrib.testing.mocks import ContextMock +from celery.exceptions import WorkerShutdown, WorkerTerminate from celery.utils.collections import LimitedSet from celery.worker.consumer.agent import Agent from celery.worker.consumer.consumer import CANCEL_TASKS_BY_DEFAULT, CLOSE, TERMINATE, Consumer @@ -17,8 +19,7 @@ from celery.worker.state import active_requests -class test_Consumer: - +class ConsumerTestCase: def get_consumer(self, no_hub=False, **kwargs): consumer = Consumer( on_task_request=Mock(), @@ -37,6 +38,9 @@ def get_consumer(self, no_hub=False, **kwargs): consumer.conninfo = consumer.connection return consumer + +class test_Consumer(ConsumerTestCase): + def test_repr(self): assert repr(self.get_consumer()) @@ -160,14 +164,6 @@ def test_post_eta(self): bucket.add.assert_called_with((request, 1)) reserv.assert_called_with(bucket) - def test_start_blueprint_raises_EMFILE(self): - c = self.get_consumer() - exc = c.blueprint.start.side_effect = OSError() - exc.errno = errno.EMFILE - - with pytest.raises(OSError): - c.start() - def test_max_restarts_exceeded(self): c = self.get_consumer() @@ -210,11 +206,12 @@ def bp_start(*args, **kwargs): c.blueprint.start.assert_called_once_with(c) - def test_no_retry_raises_error(self): - self.app.conf.broker_connection_retry = False + def test_too_many_open_files_raises_error(self): c = self.get_consumer() - c.blueprint.start.side_effect = socket.error() - with pytest.raises(socket.error): + err = OSError() + err.errno = errno.EMFILE + c.blueprint.start.side_effect = err + with pytest.raises(WorkerTerminate): c.start() def _closer(self, c): @@ -222,6 +219,25 @@ def se(*args, **kwargs): c.blueprint.state = CLOSE return se + @pytest.mark.parametrize("broker_connection_retry", [True, False]) + def test_blueprint_restart_when_state_not_in_stop_conditions(self, broker_connection_retry): + c = self.get_consumer() + + # ensure that WorkerShutdown is not raised + c.app.conf['broker_connection_retry'] = broker_connection_retry + c.app.conf['broker_connection_retry_on_startup'] = True + c.restart_count = -1 + + # ensure that blueprint state is not in stop conditions + c.blueprint.state = bootsteps.RUN + c.blueprint.start.side_effect = ConnectionError() + + # stops test from running indefinitely in the while loop + c.blueprint.restart.side_effect = self._closer(c) + + c.start() + c.blueprint.restart.assert_called_once() + def test_collects_at_restart(self): c = self.get_consumer() c.connection.collect.side_effect = MemoryError() @@ -306,6 +322,66 @@ def test_cancel_long_running_tasks_on_connection_loss__warning(self): with pytest.deprecated_call(match=CANCEL_TASKS_BY_DEFAULT): c.on_connection_error_after_connected(Mock()) + @pytest.mark.parametrize("broker_connection_retry", [True, False]) + @pytest.mark.parametrize("broker_connection_retry_on_startup", [None, False]) + def test_ensure_connected(self, subtests, broker_connection_retry, broker_connection_retry_on_startup): + c = self.get_consumer() + c.app.conf.broker_connection_retry_on_startup = broker_connection_retry_on_startup + c.app.conf.broker_connection_retry = 
broker_connection_retry + + if broker_connection_retry_on_startup is None: + with subtests.test("Deprecation warning when startup is None"): + with pytest.deprecated_call(): + c.ensure_connected(Mock()) + + if broker_connection_retry is False: + with subtests.test("Does not retry when connect throws an error and retry is set to false"): + conn = Mock() + conn.connect.side_effect = ConnectionError() + with pytest.raises(ConnectionError): + c.ensure_connected(conn) + + +@pytest.mark.parametrize( + "broker_connection_retry_on_startup,is_connection_loss_on_startup", + [ + pytest.param(False, True, id='shutdown on connection loss on startup'), + pytest.param(None, True, id='shutdown on connection loss on startup when retry on startup is undefined'), + pytest.param(False, False, id='shutdown on connection loss not on startup but startup is defined as false'), + pytest.param(None, False, id='shutdown on connection loss not on startup and startup is not defined'), + pytest.param(True, False, id='shutdown on connection loss not on startup but startup is defined as true'), + ] +) +class test_Consumer_WorkerShutdown(ConsumerTestCase): + + def test_start_raises_connection_error(self, + broker_connection_retry_on_startup, + is_connection_loss_on_startup, + caplog, subtests): + c = self.get_consumer() + # in order to reproduce the actual behavior: if this is the startup, then restart count has not been + # incremented yet, and is therefore -1. + c.restart_count = -1 if is_connection_loss_on_startup else 1 + c.app.conf['broker_connection_retry'] = False + c.app.conf['broker_connection_retry_on_startup'] = broker_connection_retry_on_startup + c.blueprint.start.side_effect = ConnectionError() + + with subtests.test("Consumer raises WorkerShutdown on connection restart"): + with pytest.raises(WorkerShutdown): + c.start() + + record = caplog.records[0] + with subtests.test("Critical error log message is outputted to the screen"): + assert record.levelname == "CRITICAL" + action = "establish" if is_connection_loss_on_startup else "re-establish" + expected_prefix = f"Retrying to {action}" + assert record.msg.startswith(expected_prefix) + conn_type_name = c._get_connection_retry_type( + is_connection_loss_on_startup + ) + expected_connection_retry_type = f"app.conf.{conn_type_name}=False" + assert expected_connection_retry_type in record.msg + class test_Heart: From 4a6bdb2e8e834540939283d85c46bbec117db3ca Mon Sep 17 00:00:00 2001 From: Mads Jensen Date: Sun, 6 Mar 2022 06:45:13 +0100 Subject: [PATCH 0029/1051] Remove __ne__ methods (#7257) * Remove __ne__ methods These are already defined as the opposite of __eq__ in Python 3, and when __eq__ returns NotImplemented, Python by default will return True. * Remove with_unique_field.__ne__ --- celery/beat.py | 8 -------- celery/events/state.py | 5 ----- celery/result.py | 12 ------------ celery/schedules.py | 15 --------------- celery/utils/collections.py | 4 ---- 5 files changed, 44 deletions(-) diff --git a/celery/beat.py b/celery/beat.py index 0cfa21559df..c81f038667f 100644 --- a/celery/beat.py +++ b/celery/beat.py @@ -195,14 +195,6 @@ def __eq__(self, other): """ return self.editable_fields_equal(other) - def __ne__(self, other): - """Test schedule entries inequality. - - Will only compare "editable" fields: - ``task``, ``schedule``, ``args``, ``kwargs``, ``options``. 
- """ - return not self == other - def _evaluate_entry_args(entry_args): if not entry_args: diff --git a/celery/events/state.py b/celery/events/state.py index febf1175145..d1848197921 100644 --- a/celery/events/state.py +++ b/celery/events/state.py @@ -137,11 +137,6 @@ def __eq__(this, other): return NotImplemented cls.__eq__ = __eq__ - def __ne__(this, other): - res = this.__eq__(other) - return True if res is NotImplemented else not res - cls.__ne__ = __ne__ - def __hash__(this): return hash(getattr(this, attr)) cls.__hash__ = __hash__ diff --git a/celery/result.py b/celery/result.py index 2a78484502e..637b99735a7 100644 --- a/celery/result.py +++ b/celery/result.py @@ -371,10 +371,6 @@ def __eq__(self, other): return other == self.id return NotImplemented - def __ne__(self, other): - res = self.__eq__(other) - return True if res is NotImplemented else not res - def __copy__(self): return self.__class__( self.id, self.backend, None, self.app, self.parent, @@ -830,10 +826,6 @@ def __eq__(self, other): return other.results == self.results return NotImplemented - def __ne__(self, other): - res = self.__eq__(other) - return True if res is NotImplemented else not res - def __repr__(self): return f'<{type(self).__name__}: [{", ".join(r.id for r in self.results)}]>' @@ -925,10 +917,6 @@ def __eq__(self, other): return other == self.id return NotImplemented - def __ne__(self, other): - res = self.__eq__(other) - return True if res is NotImplemented else not res - def __repr__(self): return f'<{type(self).__name__}: {self.id} [{", ".join(r.id for r in self.results)}]>' diff --git a/celery/schedules.py b/celery/schedules.py index 5ffbf4147e2..4aaa2400d1f 100644 --- a/celery/schedules.py +++ b/celery/schedules.py @@ -172,9 +172,6 @@ def __eq__(self, other): return self.run_every == other.run_every return self.run_every == other - def __ne__(self, other): - return not self.__eq__(other) - def __reduce__(self): return self.__class__, (self.run_every, self.relative, self.nowfun) @@ -638,12 +635,6 @@ def __eq__(self, other): ) return NotImplemented - def __ne__(self, other): - res = self.__eq__(other) - if res is NotImplemented: - return True - return not res - def maybe_schedule(s, relative=False, app=None): """Return schedule from number, timedelta, or actual schedule.""" @@ -827,9 +818,3 @@ def __eq__(self, other): other.lon == self.lon ) return NotImplemented - - def __ne__(self, other): - res = self.__eq__(other) - if res is NotImplemented: - return True - return not res diff --git a/celery/utils/collections.py b/celery/utils/collections.py index e83e2f40716..a19e7ecfb43 100644 --- a/celery/utils/collections.py +++ b/celery/utils/collections.py @@ -628,10 +628,6 @@ def __eq__(self, other): # type: (Any) -> bool return self._data == other._data - def __ne__(self, other): - # type: (Any) -> bool - return not self.__eq__(other) - def __repr__(self): # type: () -> str return REPR_LIMITED_SET.format( From d31ceff93bcc61f2d0bb4b9d49f02a35de3fe60f Mon Sep 17 00:00:00 2001 From: Omer Katz Date: Wed, 9 Mar 2022 15:51:48 +0200 Subject: [PATCH 0030/1051] Fix typo. --- celery/worker/consumer/tasks.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/celery/worker/consumer/tasks.py b/celery/worker/consumer/tasks.py index a9127142bb0..b4e4aee99ec 100644 --- a/celery/worker/consumer/tasks.py +++ b/celery/worker/consumer/tasks.py @@ -25,8 +25,8 @@ def start(self, c): """Start task consumer.""" c.update_strategies() - # - RabbitMQ 3.3 completely redefines how basic_qos works.. 
- # This will detect if the new qos smenatics is in effect, + # - RabbitMQ 3.3 completely redefines how basic_qos works... + # This will detect if the new qos semantics is in effect, # and if so make sure the 'apply_global' flag is set on qos updates. qos_global = not c.connection.qos_semantics_matches_spec From 41de2c73e46ac6ec73115ce63845b0567bd28d6b Mon Sep 17 00:00:00 2001 From: Sami Tahri Date: Thu, 20 Jan 2022 21:33:18 +0100 Subject: [PATCH 0031/1051] fix #7200 uid and gid --- celery/bin/worker.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/celery/bin/worker.py b/celery/bin/worker.py index e93f6ed6c0e..b3fc91e986b 100644 --- a/celery/bin/worker.py +++ b/celery/bin/worker.py @@ -325,6 +325,10 @@ def worker(ctx, hostname=None, pool_cls=None, app=None, uid=None, gid=None, argv.remove('--detach') if '-D' in argv: argv.remove('-D') + if "--uid" in argv: + argv.remove('--uid') + if "--gid" in argv: + argv.remove('--gid') return detach(sys.executable, argv, From 23ecf2c5a3b8264cfd102b0212984fbffba54ae2 Mon Sep 17 00:00:00 2001 From: Scott Percival Date: Tue, 8 Mar 2022 16:12:25 +0800 Subject: [PATCH 0032/1051] Remove exception-throwing from the signal handler --- celery/apps/worker.py | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/celery/apps/worker.py b/celery/apps/worker.py index 8f774ae3858..084f0b836f2 100644 --- a/celery/apps/worker.py +++ b/celery/apps/worker.py @@ -19,7 +19,6 @@ from celery import VERSION_BANNER, platforms, signals from celery.app import trace -from celery.exceptions import WorkerShutdown, WorkerTerminate from celery.loaders.app import AppLoader from celery.platforms import EX_FAILURE, EX_OK, check_privileges, isatty from celery.utils import static, term @@ -280,7 +279,7 @@ def set_process_status(self, info): def _shutdown_handler(worker, sig='TERM', how='Warm', - exc=WorkerShutdown, callback=None, exitcode=EX_OK): + callback=None, exitcode=EX_OK): def _handle_request(*args): with in_sighandler(): from celery.worker import state @@ -292,27 +291,24 @@ def _handle_request(*args): sender=worker.hostname, sig=sig, how=how, exitcode=exitcode, ) - if active_thread_count() > 1: - setattr(state, {'Warm': 'should_stop', - 'Cold': 'should_terminate'}[how], exitcode) - else: - raise exc(exitcode) + setattr(state, {'Warm': 'should_stop', + 'Cold': 'should_terminate'}[how], exitcode) _handle_request.__name__ = str(f'worker_{how}') platforms.signals[sig] = _handle_request if REMAP_SIGTERM == "SIGQUIT": install_worker_term_handler = partial( - _shutdown_handler, sig='SIGTERM', how='Cold', exc=WorkerTerminate, exitcode=EX_FAILURE, + _shutdown_handler, sig='SIGTERM', how='Cold', exitcode=EX_FAILURE, ) else: install_worker_term_handler = partial( - _shutdown_handler, sig='SIGTERM', how='Warm', exc=WorkerShutdown, + _shutdown_handler, sig='SIGTERM', how='Warm', ) if not is_jython: # pragma: no cover install_worker_term_hard_handler = partial( - _shutdown_handler, sig='SIGQUIT', how='Cold', exc=WorkerTerminate, + _shutdown_handler, sig='SIGQUIT', how='Cold', exitcode=EX_FAILURE, ) else: # pragma: no cover From 2a61aa299a40f336528c82e91506973ac8bd222b Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 14 Mar 2022 16:56:58 +0000 Subject: [PATCH 0033/1051] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/asottile/pyupgrade: v2.31.0 → 
v2.31.1](https://github.com/asottile/pyupgrade/compare/v2.31.0...v2.31.1) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 31f86c6d9c3..28bf910f39b 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/asottile/pyupgrade - rev: v2.31.0 + rev: v2.31.1 hooks: - id: pyupgrade args: ["--py37-plus"] From 06662c0b994b7554712540beb589d30cb182a407 Mon Sep 17 00:00:00 2001 From: Omer Katz Date: Wed, 16 Mar 2022 15:45:48 +0200 Subject: [PATCH 0034/1051] Update logo URL --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index d82ab9995ae..34b89476984 100644 --- a/README.rst +++ b/README.rst @@ -1,4 +1,4 @@ -.. image:: http://docs.celeryproject.org/en/latest/_images/celery-banner-small.png +.. image:: http://docs.celeryq.dev/en/latest/_images/celery-banner-small.png |build-status| |coverage| |license| |wheel| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge| From f81fa8588d26e9df427aed0dd815dbfe2977a148 Mon Sep 17 00:00:00 2001 From: Dulmandakh Date: Thu, 17 Mar 2022 16:59:14 +0800 Subject: [PATCH 0035/1051] update website url in README --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 34b89476984..b0f0e7abea6 100644 --- a/README.rst +++ b/README.rst @@ -3,7 +3,7 @@ |build-status| |coverage| |license| |wheel| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge| :Version: 5.2.3 (dawn-chorus) -:Web: https://docs.celeryproject.org/en/stable/index.html +:Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ :Keywords: task, queue, job, async, rabbitmq, amqp, redis, From 179fa19a355daf40be600ada14bc5f273fa5b71f Mon Sep 17 00:00:00 2001 From: Luccas Quadros Date: Fri, 18 Mar 2022 17:56:45 -0300 Subject: [PATCH 0036/1051] Fix Get Started links --- README.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.rst b/README.rst index b0f0e7abea6..deaf08e61ea 100644 --- a/README.rst +++ b/README.rst @@ -102,10 +102,10 @@ getting started tutorials: A more complete overview, showing more features. .. _`First steps with Celery`: - http://docs.celeryproject.org/en/latest/getting-started/first-steps-with-celery.html + https://docs.celeryq.dev/en/stable/getting-started/first-steps-with-celery.html .. _`Next steps`: - http://docs.celeryproject.org/en/latest/getting-started/next-steps.html + https://docs.celeryq.dev/en/stable/getting-started/next-steps.html You can also get started with Celery by using a hosted broker transport CloudAMQP. The largest hosting provider of RabbitMQ is a proud sponsor of Celery. 
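The next patch repairs a cross-reference to ``celery.schedules.schedule.is_due``. For orientation, a rough sketch of that API: it returns the ``schedstate`` two-tuple ``(is_due, next_time_to_check)``, with the second element given in seconds (the datetime below is purely illustrative):

.. code-block:: python

    from datetime import datetime, timedelta, timezone

    from celery.schedules import schedule

    every_five = schedule(run_every=timedelta(minutes=5))
    # Last run six minutes ago, so the entry is due now; next_check is
    # the number of seconds until the scheduler should look again.
    is_due, next_check = every_five.is_due(
        last_run_at=datetime.now(timezone.utc) - timedelta(minutes=6))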
From aedd30b2186718e81fbd935d84f4d145a3fa0bca Mon Sep 17 00:00:00 2001 From: Oleg Hoefling Date: Fri, 18 Mar 2022 13:04:29 +0100 Subject: [PATCH 0037/1051] doc: fix broken reference to schedule.is_due method Signed-off-by: Oleg Hoefling --- celery/beat.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/celery/beat.py b/celery/beat.py index c81f038667f..b8f9be23a38 100644 --- a/celery/beat.py +++ b/celery/beat.py @@ -157,7 +157,7 @@ def update(self, other): }) def is_due(self): - """See :meth:`~celery.schedule.schedule.is_due`.""" + """See :meth:`~celery.schedules.schedule.is_due`.""" return self.schedule.is_due(self.last_run_at) def __iter__(self): From 7d4658eedef4b9d87974e1a59e26c2da77b1f961 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Manuel=20V=C3=A1zquez=20Acosta?= Date: Wed, 23 Mar 2022 11:24:02 +0100 Subject: [PATCH 0038/1051] Update the package links (fixes #7372). --- celery/__init__.py | 2 +- setup.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/celery/__init__.py b/celery/__init__.py index abe15b29114..7372373f8f7 100644 --- a/celery/__init__.py +++ b/celery/__init__.py @@ -20,7 +20,7 @@ __version__ = '5.2.3' __author__ = 'Ask Solem' __contact__ = 'auvipy@gmail.com' -__homepage__ = 'http://celeryproject.org' +__homepage__ = 'https://docs.celeryq.dev/' __docformat__ = 'restructuredtext' __keywords__ = 'task job queue distributed messaging actor' diff --git a/setup.py b/setup.py index da60b24b2d3..e650ceff4db 100755 --- a/setup.py +++ b/setup.py @@ -175,8 +175,8 @@ def run_tests(self): ] }, project_urls={ - "Documentation": "https://docs.celeryproject.org/en/latest/index.html", - "Changelog": "https://docs.celeryproject.org/en/stable/changelog.html", + "Documentation": "https://docs.celeryq.dev/en/stable/", + "Changelog": "https://docs.celeryq.dev/en/stable/changelog.html", "Code": "https://github.com/celery/celery", "Tracker": "https://github.com/celery/celery/issues", "Funding": "https://opencollective.com/celery" From 24f22a5dac7c5282e59f547112a1799156382f0e Mon Sep 17 00:00:00 2001 From: James Ostrander <11338926+jlost@users.noreply.github.com> Date: Wed, 23 Mar 2022 17:41:20 -0400 Subject: [PATCH 0039/1051] Update remaining website URL in README --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index deaf08e61ea..348d13602c7 100644 --- a/README.rst +++ b/README.rst @@ -226,7 +226,7 @@ tutorials, and an API reference. 最新的中文文档托管在 https://www.celerycn.io/ 中,包含用户指南、教程、API接口等。 -.. _`latest documentation`: http://docs.celeryproject.org/en/latest/ +.. _`latest documentation`: https://docs.celeryq.dev/en/latest/ .. _celery-installation: From fbda0089f08d7f2a8f00925dbc0b6e10bd779251 Mon Sep 17 00:00:00 2001 From: Marcelo Trylesinski Date: Sat, 2 Apr 2022 10:49:56 +0200 Subject: [PATCH 0040/1051] Add `mypy` to the pipeline (#7383) * Add typing to Celery This is a simple bootstrap of the process, adding some types to a few selected functions, based on comment annotations. MyPy is chosen as the default static analyzer for the types. 
* Add mypy to the pipeline * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * Remove unused command from tox * Install mypy only on CPython * Remove wrong annotations * Update celery/utils/saferepr.py Co-authored-by: Mads Jensen Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .github/workflows/lint_python.yml | 23 ++++++++++++++++++----- celery/__main__.py | 2 +- celery/contrib/testing/worker.py | 2 +- celery/events/state.py | 10 +++++----- celery/utils/collections.py | 17 ++++++----------- pyproject.toml | 12 ++++++++++++ requirements/test.txt | 2 ++ tox.ini | 3 +++ 8 files changed, 48 insertions(+), 23 deletions(-) diff --git a/.github/workflows/lint_python.yml b/.github/workflows/lint_python.yml index 8c262d25569..eafb4dfdff3 100644 --- a/.github/workflows/lint_python.yml +++ b/.github/workflows/lint_python.yml @@ -9,9 +9,22 @@ jobs: - uses: pre-commit/action@v2.0.3 - run: pip install --upgrade pip wheel - run: pip install bandit codespell flake8 isort pytest pyupgrade tox - - run: bandit -r . || true - - run: codespell --ignore-words-list="brane,gool,ist,sherif,wil" --quiet-level=2 --skip="*.key" || true + + - name: bandit + run: bandit -r . || true + + - name: Run CodeSpell + run: codespell --ignore-words-list="brane,gool,ist,sherif,wil" --quiet-level=2 --skip="*.key" || true - run: pip install -r requirements.txt || true - - run: tox || true - - run: pytest . || true - - run: pytest --doctest-modules . || true + + - name: Run tox + run: tox || true + + - name: Run pytest + run: pytest . || true + + - name: Test pytest with doctest + run: pytest --doctest-modules . || true + + - name: MyPy + run: tox -e mypy diff --git a/celery/__main__.py b/celery/__main__.py index e865ea4bdaa..8c48d7071af 100644 --- a/celery/__main__.py +++ b/celery/__main__.py @@ -7,7 +7,7 @@ __all__ = ('main',) -def main(): +def main() -> None: """Entrypoint to the ``celery`` umbrella command.""" if 'multi' not in sys.argv: maybe_patch_concurrency() diff --git a/celery/contrib/testing/worker.py b/celery/contrib/testing/worker.py index b4e68cb8dec..8467f85f3b4 100644 --- a/celery/contrib/testing/worker.py +++ b/celery/contrib/testing/worker.py @@ -167,7 +167,7 @@ def _start_worker_process(app, cluster.stopwait() -def setup_app_for_worker(app, loglevel, logfile): +def setup_app_for_worker(app, loglevel, logfile) -> None: # type: (Celery, Union[str, int], str) -> None """Setup the app to be used for starting an embedded worker.""" app.finalize() diff --git a/celery/events/state.py b/celery/events/state.py index d1848197921..f6fc2a59d4f 100644 --- a/celery/events/state.py +++ b/celery/events/state.py @@ -22,7 +22,7 @@ from itertools import islice from operator import itemgetter from time import time -from typing import Mapping +from typing import Mapping, Optional from weakref import WeakSet, ref from kombu.clocks import timetuple @@ -452,7 +452,7 @@ def clear_tasks(self, ready=True): with self._mutex: return self._clear_tasks(ready) - def _clear_tasks(self, ready=True): + def _clear_tasks(self, ready: bool = True): if ready: in_progress = { uuid: task for uuid, task in self.itertasks() @@ -470,7 +470,7 @@ def _clear(self, ready=True): self.event_count = 0 self.task_count = 0 - def clear(self, ready=True): + def clear(self, ready: bool = True): with self._mutex: return self._clear(ready) @@ -647,13 +647,13 @@ def rebuild_taskheap(self, timetuple=timetuple): ] heap.sort() - def itertasks(self, limit=None): + def 
itertasks(self, limit: Optional[int] = None): for index, row in enumerate(self.tasks.items()): yield row if limit and index + 1 >= limit: break - def tasks_by_time(self, limit=None, reverse=True): + def tasks_by_time(self, limit=None, reverse: bool = True): """Generator yielding tasks ordered by time. Yields: diff --git a/celery/utils/collections.py b/celery/utils/collections.py index a19e7ecfb43..dc32404c0f4 100644 --- a/celery/utils/collections.py +++ b/celery/utils/collections.py @@ -113,8 +113,7 @@ def __getattr__(self, k): raise AttributeError( f'{type(self).__name__!r} object has no attribute {k!r}') - def __setattr__(self, key, value): - # type: (str, Any) -> None + def __setattr__(self, key: str, value) -> None: """`d[key] = value -> d.key = value`.""" self[key] = value @@ -595,7 +594,7 @@ def purge(self, now=None): break # oldest item hasn't expired yet self.pop() - def pop(self, default=None): + def pop(self, default=None) -> Any: # type: (Any) -> Any """Remove and return the oldest item, or :const:`None` when empty.""" while self._heap: @@ -671,20 +670,17 @@ class Evictable: Empty = Empty - def evict(self): - # type: () -> None + def evict(self) -> None: """Force evict until maxsize is enforced.""" self._evict(range=count) - def _evict(self, limit=100, range=range): - # type: (int) -> None + def _evict(self, limit: int = 100, range=range) -> None: try: [self._evict1() for _ in range(limit)] except IndexError: pass - def _evict1(self): - # type: () -> None + def _evict1(self) -> None: if self._evictcount <= self.maxsize: raise IndexError() try: @@ -746,8 +742,7 @@ def __len__(self): # type: () -> int return self._len() - def __contains__(self, item): - # type: () -> bool + def __contains__(self, item) -> bool: return item in self.data def __reversed__(self): diff --git a/pyproject.toml b/pyproject.toml index 8ff14c4766b..1098174b0a4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,3 +4,15 @@ testpaths = "t/unit/" python_classes = "test_*" xdfail_strict=true markers = ["sleepdeprived_patched_module", "masked_modules", "patched_environ", "patched_module"] + +[tool.mypy] +warn_unused_configs = true +strict = false +warn_return_any = true +follow_imports = "skip" +show_error_codes = true +disallow_untyped_defs = true +ignore_missing_imports = true +files = [ + "celery/__main__.py", +] diff --git a/requirements/test.txt b/requirements/test.txt index 406e5f6f50f..63f5833d539 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -4,6 +4,8 @@ pytest-subtests pytest-timeout~=2.1.0 boto3>=1.9.178 moto>=2.2.6 +# typing extensions +mypy; platform_python_implementation=="CPython" pre-commit -r extras/yaml.txt -r extras/msgpack.txt diff --git a/tox.ini b/tox.ini index 39cfcb5e198..b9901ca35d3 100644 --- a/tox.ini +++ b/tox.ini @@ -78,9 +78,12 @@ basepython = 3.9: python3.9 3.10: python3.10 pypy3: pypy3 + mypy: python3.8 lint,apicheck,linkcheck,configcheck,bandit: python3.9 usedevelop = True +[testenv:mypy] +commands = python -m mypy --config-file pyproject.toml [testenv:apicheck] setenv = From 863d01d428fc1e267ec0aba439f9ba7c6d5ba8c7 Mon Sep 17 00:00:00 2001 From: Emmanuel Meric de Bellefon Date: Sat, 2 Apr 2022 20:47:10 +0200 Subject: [PATCH 0041/1051] Update schedules.py --- celery/schedules.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/celery/schedules.py b/celery/schedules.py index 4aaa2400d1f..8a2f3c9bc00 100644 --- a/celery/schedules.py +++ b/celery/schedules.py @@ -656,7 +656,7 @@ class solar(BaseSchedule): Notes: - Available event valus are: + 
Available event values are: - ``dawn_astronomical`` - ``dawn_nautical`` From 314d014e6d874a0d8ad073a97a90371685263483 Mon Sep 17 00:00:00 2001 From: Marcelo Trylesinski Date: Sun, 3 Apr 2022 06:02:06 +0200 Subject: [PATCH 0042/1051] Annotate `celery/states.py` (#7395) * Annotate `celery/states.py` * Add comma on pyproject to avoid conflicts * Include signals.py as well --- celery/states.py | 10 +++++----- pyproject.toml | 2 ++ 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/celery/states.py b/celery/states.py index e807ed4822c..6e21a22b5da 100644 --- a/celery/states.py +++ b/celery/states.py @@ -78,7 +78,7 @@ NONE_PRECEDENCE = PRECEDENCE_LOOKUP[None] -def precedence(state): +def precedence(state: str) -> int: """Get the precedence index for state. Lower index means higher precedence. @@ -110,16 +110,16 @@ class state(str): False """ - def __gt__(self, other): + def __gt__(self, other: str) -> bool: return precedence(self) < precedence(other) - def __ge__(self, other): + def __ge__(self, other: str) -> bool: return precedence(self) <= precedence(other) - def __lt__(self, other): + def __lt__(self, other: str) -> bool: return precedence(self) > precedence(other) - def __le__(self, other): + def __le__(self, other: str) -> bool: return precedence(self) >= precedence(other) diff --git a/pyproject.toml b/pyproject.toml index 1098174b0a4..179660a0c35 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,4 +15,6 @@ disallow_untyped_defs = true ignore_missing_imports = true files = [ "celery/__main__.py", + "celery/states.py", + "celery/signals.py", ] From 1ccd8871b80a1fcb12234e49695d818062275589 Mon Sep 17 00:00:00 2001 From: Marcelo Trylesinski Date: Sun, 3 Apr 2022 07:43:27 +0200 Subject: [PATCH 0043/1051] Ignore coverage on TYPE_CHECKING --- pyproject.toml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 179660a0c35..72a2bfd3f50 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -18,3 +18,9 @@ files = [ "celery/states.py", "celery/signals.py", ] + +[tool.coverage.report] +exclude_lines = [ + "pragma: no cover", + "if TYPE_CHECKING:" +] From d230c3457b56f2705c1e924814c07c150b88af49 Mon Sep 17 00:00:00 2001 From: Omer Katz Date: Sun, 3 Apr 2022 20:11:19 +0300 Subject: [PATCH 0044/1051] Expose more debugging information when receiving unknown tasks. (#7404) Since the message might have been delivered to the wrong worker due to a routing error, we need to emit the headers and delivery_info when logging the error as well as the message's body. --- celery/worker/consumer/consumer.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/celery/worker/consumer/consumer.py b/celery/worker/consumer/consumer.py index c72493f5d02..86a79603683 100644 --- a/celery/worker/consumer/consumer.py +++ b/celery/worker/consumer/consumer.py @@ -81,6 +81,12 @@ The full contents of the message body was: %s + +The full contents of the message headers: +%s + +The delivery info for this task is: +%s """ #: Error message for when an invalid task message is received.
@@ -511,7 +517,11 @@ def on_unknown_message(self, body, message): signals.task_rejected.send(sender=self, message=message, exc=None) def on_unknown_task(self, body, message, exc): - error(UNKNOWN_TASK_ERROR, exc, dump_body(message, body), + error(UNKNOWN_TASK_ERROR, + exc, + dump_body(message, body), + message.headers, + message.delivery_info, exc_info=True) try: id_, name = message.headers['id'], message.headers['task'] From 2d0cee5ba4c0bda1177384c2a24135c2d30597cb Mon Sep 17 00:00:00 2001 From: Omer Katz Date: Sun, 3 Apr 2022 20:11:49 +0300 Subject: [PATCH 0045/1051] Expose more debugging information when receiving unknown tasks. (#7405) Since the message might have been delivered to the wrong worker due to a routing error, we need to emit the headers and delivery_info when logging the error as well as the message's body. --- celery/worker/consumer/consumer.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/celery/worker/consumer/consumer.py b/celery/worker/consumer/consumer.py index f12753f741f..c9b820e4966 100644 --- a/celery/worker/consumer/consumer.py +++ b/celery/worker/consumer/consumer.py @@ -80,6 +80,12 @@ The full contents of the message body was: %s + +The full contents of the message headers: +%s + +The delivery info for this task is: +%s """ #: Error message for when an invalid task message is received. @@ -540,7 +546,11 @@ def on_unknown_message(self, body, message): signals.task_rejected.send(sender=self, message=message, exc=None) def on_unknown_task(self, body, message, exc): - error(UNKNOWN_TASK_ERROR, exc, dump_body(message, body), + error(UNKNOWN_TASK_ERROR, + exc, + dump_body(message, body), + message.headers, + message.delivery_info, exc_info=True) try: id_, name = message.headers['id'], message.headers['task'] From a326cb591b31737840eb3c4afdd26022e84332a2 Mon Sep 17 00:00:00 2001 From: Omer Katz Date: Sun, 3 Apr 2022 20:15:33 +0300 Subject: [PATCH 0046/1051] Added changelog entry. --- Changelog.rst | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/Changelog.rst b/Changelog.rst index daf7b52e019..3d12ee85762 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -8,6 +8,16 @@ This document contains change notes for bugfix & new features in the & 5.2.x series, please see :ref:`whatsnew-5.2` for an overview of what's new in Celery 5.2. +.. _version-5.2.4: + +5.2.4 +===== + +:release-date: 2022-4-03 20:30 P.M UTC+2:00 +:release-by: Omer Katz + +- Expose more debugging information when receiving unknown tasks (#7404). + .. _version-5.2.3: 5.2.3 From 9377e94927d0699de7b8eaa7838589051c2ea87a Mon Sep 17 00:00:00 2001 From: Omer Katz Date: Sun, 3 Apr 2022 20:16:01 +0300 Subject: [PATCH 0047/1051] =?UTF-8?q?Bump=20version:=205.2.3=20=E2=86=92?= =?UTF-8?q?=205.2.4?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.cfg | 2 +- README.rst | 2 +- celery/__init__.py | 2 +- docs/includes/introduction.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 1a7dbf3b05d..c282c318395 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 5.2.3 +current_version = 5.2.4 commit = True tag = True parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P[a-z\d]+)?
diff --git a/README.rst b/README.rst index d82ab9995ae..03b35b50f29 100644 --- a/README.rst +++ b/README.rst @@ -2,7 +2,7 @@ |build-status| |coverage| |license| |wheel| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge| -:Version: 5.2.3 (dawn-chorus) +:Version: 5.2.4 (dawn-chorus) :Web: https://docs.celeryproject.org/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ diff --git a/celery/__init__.py b/celery/__init__.py index df1fe1a6c05..d9773e9d47a 100644 --- a/celery/__init__.py +++ b/celery/__init__.py @@ -17,7 +17,7 @@ SERIES = 'dawn-chorus' -__version__ = '5.2.3' +__version__ = '5.2.4' __author__ = 'Ask Solem' __contact__ = 'auvipy@gmail.com' __homepage__ = 'http://celeryproject.org' diff --git a/docs/includes/introduction.txt b/docs/includes/introduction.txt index 0e97f80ffa0..ab0ae82240e 100644 --- a/docs/includes/introduction.txt +++ b/docs/includes/introduction.txt @@ -1,4 +1,4 @@ -:Version: 5.2.3 (dawn-chorus) +:Version: 5.2.4 (dawn-chorus) :Web: https://docs.celeryproject.org/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ From 20d88756ab9d9b62748193af048f17f91f7a9261 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michal=20=C4=8Ciha=C5=99?= Date: Tue, 11 Jan 2022 13:36:11 +0100 Subject: [PATCH 0048/1051] Use importlib instead of deprecated pkg_resources This avoids runtime dependency on setuptools. --- celery/app/backends.py | 3 +-- celery/beat.py | 3 +-- celery/bin/celery.py | 8 ++++++-- celery/utils/imports.py | 12 ++++++------ docs/userguide/extending.rst | 2 +- requirements/default.txt | 2 +- 6 files changed, 16 insertions(+), 14 deletions(-) diff --git a/celery/app/backends.py b/celery/app/backends.py index 8f0390bf2b7..ab40ccaed9f 100644 --- a/celery/app/backends.py +++ b/celery/app/backends.py @@ -44,8 +44,7 @@ def by_name(backend=None, loader=None, backend = backend or 'disabled' loader = loader or current_app.loader aliases = dict(BACKEND_ALIASES, **loader.override_backends) - aliases.update( - load_extension_class_names(extension_namespace) or {}) + aliases.update(load_extension_class_names(extension_namespace)) try: cls = symbol_by_name(backend, aliases) except ValueError as exc: diff --git a/celery/beat.py b/celery/beat.py index d8a4fc9e8b2..74537e3469d 100644 --- a/celery/beat.py +++ b/celery/beat.py @@ -666,8 +666,7 @@ def stop(self, wait=False): def get_scheduler(self, lazy=False, extension_namespace='celery.beat_schedulers'): filename = self.schedule_filename - aliases = dict( - load_extension_class_names(extension_namespace) or {}) + aliases = dict(load_extension_class_names(extension_namespace)) return symbol_by_name(self.scheduler_cls, aliases=aliases)( app=self.app, schedule_filename=filename, diff --git a/celery/bin/celery.py b/celery/bin/celery.py index c6b862d0f10..2aee6414be4 100644 --- a/celery/bin/celery.py +++ b/celery/bin/celery.py @@ -3,12 +3,16 @@ import pathlib import traceback +try: + from importlib.metadata import entry_points +except ImportError: + from importlib_metadata import entry_points + import click import click.exceptions from click.types import ParamType from click_didyoumean import DYMGroup from click_plugins import with_plugins -from pkg_resources import iter_entry_points from celery import VERSION_BANNER from celery.app.utils import find_app @@ -71,7 +75,7 @@ def convert(self, value, param, ctx): APP = App() -@with_plugins(iter_entry_points('celery.commands')) +@with_plugins(entry_points().get('celery.commands', [])) 
@click.group(cls=DYMGroup, invoke_without_command=True) @click.option('-A', '--app', diff --git a/celery/utils/imports.py b/celery/utils/imports.py index 0303bd3c051..9e841c6e2ea 100644 --- a/celery/utils/imports.py +++ b/celery/utils/imports.py @@ -6,6 +6,11 @@ from contextlib import contextmanager from importlib import reload +try: + from importlib.metadata import entry_points +except ImportError: + from importlib_metadata import entry_points + from kombu.utils.imports import symbol_by_name #: Billiard sets this when execv is enabled. @@ -137,12 +142,7 @@ def gen_task_name(app, name, module_name): def load_extension_class_names(namespace): - try: - from pkg_resources import iter_entry_points - except ImportError: # pragma: no cover - return - - for ep in iter_entry_points(namespace): + for ep in entry_points().get(namespace, []): yield ep.name, ':'.join([ep.module_name, ep.attrs[0]]) diff --git a/docs/userguide/extending.rst b/docs/userguide/extending.rst index 59c8f83401e..ea8c0462598 100644 --- a/docs/userguide/extending.rst +++ b/docs/userguide/extending.rst @@ -829,7 +829,7 @@ New commands can be added to the :program:`celery` umbrella command by using Entry-points is special meta-data that can be added to your packages ``setup.py`` program, -and then after installation, read from the system using the :mod:`pkg_resources` module. +and then after installation, read from the system using the :mod:`importlib` module. Celery recognizes ``celery.commands`` entry-points to install additional sub-commands, where the value of the entry-point must point to a valid click diff --git a/requirements/default.txt b/requirements/default.txt index 509a43d9e5e..0203186c858 100644 --- a/requirements/default.txt +++ b/requirements/default.txt @@ -6,4 +6,4 @@ click>=8.0.3,<9.0 click-didyoumean>=0.0.3 click-repl>=0.2.0 click-plugins>=1.1.1 -setuptools>=59.1.1,<59.7.0 +importlib-metadata>=1.4.0; python_version < '3.8' From bbc704411415788d5ef504a0864d514be4fac29c Mon Sep 17 00:00:00 2001 From: Omer Katz Date: Sun, 3 Apr 2022 20:42:24 +0300 Subject: [PATCH 0049/1051] Added changelog entry. --- Changelog.rst | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/Changelog.rst b/Changelog.rst index 3d12ee85762..d38ffefb9cf 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -8,6 +8,17 @@ This document contains change notes for bugfix & new features in the & 5.2.x series, please see :ref:`whatsnew-5.2` for an overview of what's new in Celery 5.2. + +.. _version-5.2.5: + +5.2.5 +===== + +:release-date: 2022-4-03 20:42 P.M UTC+2:00 +:release-by: Omer Katz + +- Use importlib instead of deprecated pkg_resources (#7218). + .. _version-5.2.4: 5.2.4 From e3f9f6a4e6e5eefe27219534ea0e94b9adcc9078 Mon Sep 17 00:00:00 2001 From: Omer Katz Date: Sun, 3 Apr 2022 20:43:16 +0300 Subject: [PATCH 0050/1051] =?UTF-8?q?Bump=20version:=205.2.4=20=E2=86=92?= =?UTF-8?q?=205.2.5?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.cfg | 2 +- README.rst | 2 +- celery/__init__.py | 2 +- docs/includes/introduction.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index c282c318395..9ff614747e0 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 5.2.4 +current_version = 5.2.5 commit = True tag = True parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P[a-z\d]+)? 
diff --git a/README.rst b/README.rst index 03b35b50f29..2f9ed396212 100644 --- a/README.rst +++ b/README.rst @@ -2,7 +2,7 @@ |build-status| |coverage| |license| |wheel| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge| -:Version: 5.2.4 (dawn-chorus) +:Version: 5.2.5 (dawn-chorus) :Web: https://docs.celeryproject.org/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ diff --git a/celery/__init__.py b/celery/__init__.py index d9773e9d47a..1fec7be8709 100644 --- a/celery/__init__.py +++ b/celery/__init__.py @@ -17,7 +17,7 @@ SERIES = 'dawn-chorus' -__version__ = '5.2.4' +__version__ = '5.2.5' __author__ = 'Ask Solem' __contact__ = 'auvipy@gmail.com' __homepage__ = 'http://celeryproject.org' diff --git a/docs/includes/introduction.txt b/docs/includes/introduction.txt index ab0ae82240e..45b32667563 100644 --- a/docs/includes/introduction.txt +++ b/docs/includes/introduction.txt @@ -1,4 +1,4 @@ -:Version: 5.2.4 (dawn-chorus) +:Version: 5.2.5 (dawn-chorus) :Web: https://docs.celeryproject.org/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ From 07c719e0f75c7d62ba8ef789e2920667173ef76f Mon Sep 17 00:00:00 2001 From: pyup-bot Date: Mon, 4 Apr 2022 15:56:35 +0300 Subject: [PATCH 0051/1051] Update sphinx-click from 2.5.0 to 3.1.0 --- requirements/docs.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/docs.txt b/requirements/docs.txt index 46b82bd3c26..be7071f4500 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -1,7 +1,7 @@ sphinx_celery==2.0.0 Sphinx>=3.0.0 sphinx-testing==0.7.2 -sphinx-click==2.5.0 +sphinx-click==3.1.0 -r extras/sqlalchemy.txt -r test.txt -r deps/mock.txt From dbef906b73b625cc8ffdc3a094544b42301f729a Mon Sep 17 00:00:00 2001 From: pyup-bot Date: Mon, 4 Apr 2022 15:56:47 +0300 Subject: [PATCH 0052/1051] Pin pytest-subtests to latest version 0.7.0 --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index 63f5833d539..48d1641b0b7 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,6 +1,6 @@ pytest~=7.0.1 pytest-celery -pytest-subtests +pytest-subtests==0.7.0 pytest-timeout~=2.1.0 boto3>=1.9.178 moto>=2.2.6 From 7b0663a899c5f067bd702e0c4f47de5f3a79abe2 Mon Sep 17 00:00:00 2001 From: pyup-bot Date: Mon, 4 Apr 2022 15:56:52 +0300 Subject: [PATCH 0053/1051] Pin pre-commit to latest version 2.18.1 --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index 48d1641b0b7..a84ae5cecbb 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -6,6 +6,6 @@ boto3>=1.9.178 moto>=2.2.6 # typing extensions mypy; platform_python_implementation=="CPython" -pre-commit +pre-commit==2.18.1 -r extras/yaml.txt -r extras/msgpack.txt From 5ca5292fe7394c103f88ed3c8f8c432a5b49eee2 Mon Sep 17 00:00:00 2001 From: pyup-bot Date: Mon, 4 Apr 2022 15:56:55 +0300 Subject: [PATCH 0054/1051] Pin msgpack to latest version 1.0.3 --- requirements/extras/msgpack.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/msgpack.txt b/requirements/extras/msgpack.txt index 3aae276bcd9..ea1047efad5 100644 --- a/requirements/extras/msgpack.txt +++ b/requirements/extras/msgpack.txt @@ -1 +1 @@ -msgpack +msgpack==1.0.3 From 18d25ada6ab51b83dc44481a26994c553882baa5 Mon Sep 17 00:00:00 2001 From: "pyup.io bot" Date: Mon, 4 Apr 2022 07:01:00 
-0700 Subject: [PATCH 0055/1051] Pin sqlalchemy to latest version 1.4.34 (#7412) * Pin sqlalchemy to latest version 1.4.34 * sqlalchemy~=1.4.34 Co-authored-by: Asif Saif Uddin --- requirements/extras/sqlalchemy.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/sqlalchemy.txt b/requirements/extras/sqlalchemy.txt index 39fb2befb58..0f2e8f033eb 100644 --- a/requirements/extras/sqlalchemy.txt +++ b/requirements/extras/sqlalchemy.txt @@ -1 +1 @@ -sqlalchemy +sqlalchemy~=1.4.34 From 26ec4f79ed325a5d120ae48f18f92d7386cbab58 Mon Sep 17 00:00:00 2001 From: pyup-bot Date: Mon, 4 Apr 2022 15:58:32 +0300 Subject: [PATCH 0056/1051] Pin pycouchdb to latest version 1.14.1 --- requirements/extras/couchdb.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/couchdb.txt b/requirements/extras/couchdb.txt index bc7a1a32b9f..0e21a4ff6b6 100644 --- a/requirements/extras/couchdb.txt +++ b/requirements/extras/couchdb.txt @@ -1 +1 @@ -pycouchdb +pycouchdb==1.14.1 From 29e6c774777210e7bcb5917f6754090dc88250a0 Mon Sep 17 00:00:00 2001 From: "pyup.io bot" Date: Mon, 4 Apr 2022 07:06:34 -0700 Subject: [PATCH 0057/1051] Update sphinx-testing from 0.7.2 to 1.0.1 (#7410) Co-authored-by: Asif Saif Uddin --- requirements/docs.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/docs.txt b/requirements/docs.txt index be7071f4500..d83a874a1cb 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -1,6 +1,6 @@ sphinx_celery==2.0.0 Sphinx>=3.0.0 -sphinx-testing==0.7.2 +sphinx-testing==1.0.1 sphinx-click==3.1.0 -r extras/sqlalchemy.txt -r test.txt From f20bc224f5126b5b80c2ba5a2617e635d9ed8908 Mon Sep 17 00:00:00 2001 From: "pyup.io bot" Date: Mon, 4 Apr 2022 07:07:25 -0700 Subject: [PATCH 0058/1051] Pin pytest to latest version 7.1.1 (#7413) * Pin pytest to latest version 7.1.1 * pytest~=7.1.1 Co-authored-by: Asif Saif Uddin --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index a84ae5cecbb..179133446e5 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,4 +1,4 @@ -pytest~=7.0.1 +pytest~=7.1.1 pytest-celery pytest-subtests==0.7.0 pytest-timeout~=2.1.0 From 45f87ac4b4a1d7078dced4b3260508572c3ca8db Mon Sep 17 00:00:00 2001 From: pyup-bot Date: Mon, 4 Apr 2022 15:56:57 +0300 Subject: [PATCH 0059/1051] Pin cryptography to latest version 36.0.2 --- requirements/extras/auth.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/auth.txt b/requirements/extras/auth.txt index 0d38bc5ea25..682fb872fcb 100644 --- a/requirements/extras/auth.txt +++ b/requirements/extras/auth.txt @@ -1 +1 @@ -cryptography +cryptography==36.0.2 From 6b8f6dabe2bff932ad8d9612668c49285f403056 Mon Sep 17 00:00:00 2001 From: pyup-bot Date: Mon, 4 Apr 2022 15:58:29 +0300 Subject: [PATCH 0060/1051] Update pydocumentdb from 2.3.2 to 2.3.5 --- requirements/extras/cosmosdbsql.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/cosmosdbsql.txt b/requirements/extras/cosmosdbsql.txt index 23e1783b2fd..349dcf8bebb 100644 --- a/requirements/extras/cosmosdbsql.txt +++ b/requirements/extras/cosmosdbsql.txt @@ -1 +1 @@ -pydocumentdb==2.3.2 +pydocumentdb==2.3.5 From e69bc33e55b02bf9b7cfa70aa03d1393fd956d48 Mon Sep 17 00:00:00 2001 From: pyup-bot Date: Mon, 4 Apr 2022 15:58:39 +0300 Subject: [PATCH 0061/1051] Pin pyro4 to latest version 4.82 --- requirements/extras/pyro.txt | 2 +- 1 
file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/pyro.txt b/requirements/extras/pyro.txt index d19b0db3892..bde9e2995b9 100644 --- a/requirements/extras/pyro.txt +++ b/requirements/extras/pyro.txt @@ -1 +1 @@ -pyro4 +pyro4==4.82 From 15cec06da49b00aee4a86f1df0a4667fe2543d2a Mon Sep 17 00:00:00 2001 From: pyup-bot Date: Mon, 4 Apr 2022 15:58:36 +0300 Subject: [PATCH 0062/1051] Pin pylibmc to latest version 1.6.1 --- requirements/extras/memcache.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/memcache.txt b/requirements/extras/memcache.txt index 32da5dcc24a..c6122cbd4a2 100644 --- a/requirements/extras/memcache.txt +++ b/requirements/extras/memcache.txt @@ -1 +1 @@ -pylibmc; platform_system != "Windows" +pylibmc==1.6.1; platform_system != "Windows" From 14677c83c6a12b066207567cc703bb7995ba30aa Mon Sep 17 00:00:00 2001 From: pyup-bot Date: Mon, 4 Apr 2022 15:58:38 +0300 Subject: [PATCH 0063/1051] Pin python-memcached to latest version 1.59 --- requirements/extras/pymemcache.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/pymemcache.txt b/requirements/extras/pymemcache.txt index 851bfd86d9b..24743088b93 100644 --- a/requirements/extras/pymemcache.txt +++ b/requirements/extras/pymemcache.txt @@ -1 +1 @@ -python-memcached +python-memcached==1.59 From 404149ea85d8c04ec2ef7f4c75273eb0686805bf Mon Sep 17 00:00:00 2001 From: pyup-bot Date: Mon, 4 Apr 2022 16:00:03 +0300 Subject: [PATCH 0064/1051] Pin codecov to latest version 2.1.12 --- requirements/test-ci-base.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test-ci-base.txt b/requirements/test-ci-base.txt index 26aaa089f31..63a15706a7c 100644 --- a/requirements/test-ci-base.txt +++ b/requirements/test-ci-base.txt @@ -1,6 +1,6 @@ pytest-cov pytest-github-actions-annotate-failures -codecov +codecov==2.1.12 -r extras/redis.txt -r extras/sqlalchemy.txt -r extras/pymemcache.txt From a5a0341072713de81ee5580a23c27a975acfc5a8 Mon Sep 17 00:00:00 2001 From: "pyup.io bot" Date: Mon, 4 Apr 2022 07:18:38 -0700 Subject: [PATCH 0065/1051] Pin kombu to latest version 5.2.4 (#7427) * Pin kombu to latest version 5.2.4 * kombu[sqs]~=5.2.4 Co-authored-by: Asif Saif Uddin --- requirements/extras/sqs.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/sqs.txt b/requirements/extras/sqs.txt index 8a7fc342f07..b4d8f05da78 100644 --- a/requirements/extras/sqs.txt +++ b/requirements/extras/sqs.txt @@ -1 +1 @@ -kombu[sqs] +kombu[sqs]~=5.2.4 From 9ad4fffba4b03b8f48a71e5a8df6aff635c5b232 Mon Sep 17 00:00:00 2001 From: "pyup.io bot" Date: Mon, 4 Apr 2022 07:22:33 -0700 Subject: [PATCH 0066/1051] Pin ephem to latest version 4.1.3 (#7430) * ephem~=4.1.3 Co-authored-by: Asif Saif Uddin --- requirements/extras/solar.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/solar.txt b/requirements/extras/solar.txt index 6be7adf94ff..e77d1f1752a 100644 --- a/requirements/extras/solar.txt +++ b/requirements/extras/solar.txt @@ -1 +1 @@ -ephem; platform_python_implementation!="PyPy" +ephem~=4.1.3; platform_python_implementation!="PyPy" From ba68cd734a0d1b240121c8be48e58d95e93b47ad Mon Sep 17 00:00:00 2001 From: Omer Katz Date: Tue, 5 Apr 2022 11:12:49 +0300 Subject: [PATCH 0067/1051] Backport #7406 to 5.2 (#7431) * load_extension_class_names - correct module_name 95015a changed over to using importlib rather than pkg_resources, unfortunately the object is not exactly the 
same. Attempting to start up a celery instance with `django-celery-results` installed results in an exception during `load_extension_class_names`; ``` During handling of the above exception, another exception occurred: Traceback (most recent call last): File "/usr/lib/python3.10/site-packages/celery/worker/worker.py", line 203, in start self.blueprint.start(self) File "/usr/lib/python3.10/site-packages/celery/bootsteps.py", line 112, in start self.on_start() File "/usr/lib/python3.10/site-packages/celery/apps/worker.py", line 136, in on_start self.emit_banner() File "/usr/lib/python3.10/site-packages/celery/apps/worker.py", line 170, in emit_banner ' \n', self.startup_info(artlines=not use_image))), File "/usr/lib/python3.10/site-packages/celery/apps/worker.py", line 232, in startup_info results=self.app.backend.as_uri(), File "/usr/lib/python3.10/site-packages/celery/app/base.py", line 1252, in backend self._local.backend = new_backend = self._get_backend() File "/usr/lib/python3.10/site-packages/celery/app/base.py", line 955, in _get_backend backend, url = backends.by_url( File "/usr/lib/python3.10/site-packages/celery/app/backends.py", line 69, in by_url return by_name(backend, loader), url File "/usr/lib/python3.10/site-packages/celery/app/backends.py", line 47, in by_name aliases.update(load_extension_class_names(extension_namespace)) File "/usr/lib/python3.10/site-packages/celery/utils/imports.py", line 146, in load_extension_class_names yield ep.name, ':'.join([ep.module_name, ep.attrs[0]]) AttributeError: 'EntryPoint' object has no attribute 'module_name' ``` Move over to using the direct value should resolve this issue; ``` >>> from pkg_resources import iter_entry_points >>> list(iter_entry_points('celery.result_backends'))[0].__dict__ {'name': 'django-cache', 'module_name': 'django_celery_results.backends', 'attrs': ('CacheBackend',), 'extras': (), 'dist': django-celery-results 2.3.0 (/usr/lib/python3.10/site-packages)} ``` vs ``` >>> from importlib.metadata import entry_points >>> entry_points().get('celery.result_backends')[0] EntryPoint(name='django-cache', value='django_celery_results.backends:CacheBackend', group='celery.result_backends') ``` * Update changelog. Co-authored-by: Damian Zaremba Co-authored-by: Omer Katz --- Changelog.rst | 13 +++++++++++++ celery/utils/imports.py | 2 +- 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/Changelog.rst b/Changelog.rst index d38ffefb9cf..44e6b921a01 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -9,6 +9,17 @@ in the & 5.2.x series, please see :ref:`whatsnew-5.2` for an overview of what's new in Celery 5.2. +.. _version-5.2.6: + +5.2.6 +===== + +:release-date: 2022-4-04 21:15 P.M UTC+2:00 +:release-by: Omer Katz + +- load_extension_class_names - correct module_name (#7406). + This fixes a regression caused by #7218. + .. _version-5.2.5: 5.2.5 @@ -17,6 +28,8 @@ an overview of what's new in Celery 5.2. :release-date: 2022-4-03 20:42 P.M UTC+2:00 :release-by: Omer Katz +**This release was yanked due to a regression caused by the PR below** + - Use importlib instead of deprecated pkg_resources (#7218). .. 
_version-5.2.4: diff --git a/celery/utils/imports.py b/celery/utils/imports.py index 9e841c6e2ea..6fcdf2e0e17 100644 --- a/celery/utils/imports.py +++ b/celery/utils/imports.py @@ -143,7 +143,7 @@ def gen_task_name(app, name, module_name): def load_extension_class_names(namespace): for ep in entry_points().get(namespace, []): - yield ep.name, ':'.join([ep.module_name, ep.attrs[0]]) + yield ep.name, ep.value def load_extension_classes(namespace): From a31d6f45334ef8d9df93cae78338dabc81028b5b Mon Sep 17 00:00:00 2001 From: Omer Katz Date: Tue, 5 Apr 2022 11:24:07 +0300 Subject: [PATCH 0068/1051] Revert "Backport #7406 to 5.2 (#7431)" (#7432) This reverts commit ba68cd734a0d1b240121c8be48e58d95e93b47ad. --- Changelog.rst | 13 ------------- celery/utils/imports.py | 2 +- 2 files changed, 1 insertion(+), 14 deletions(-) diff --git a/Changelog.rst b/Changelog.rst index 44e6b921a01..d38ffefb9cf 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -9,17 +9,6 @@ in the & 5.2.x series, please see :ref:`whatsnew-5.2` for an overview of what's new in Celery 5.2. -.. _version-5.2.6: - -5.2.6 -===== - -:release-date: 2022-4-04 21:15 P.M UTC+2:00 -:release-by: Omer Katz - -- load_extension_class_names - correct module_name (#7406). - This fixes a regression caused by #7218. - .. _version-5.2.5: 5.2.5 @@ -28,8 +17,6 @@ an overview of what's new in Celery 5.2. :release-date: 2022-4-03 20:42 P.M UTC+2:00 :release-by: Omer Katz -**This release was yanked due to a regression caused by the PR below** - - Use importlib instead of deprecated pkg_resources (#7218). .. _version-5.2.4: diff --git a/celery/utils/imports.py b/celery/utils/imports.py index 6fcdf2e0e17..9e841c6e2ea 100644 --- a/celery/utils/imports.py +++ b/celery/utils/imports.py @@ -143,7 +143,7 @@ def gen_task_name(app, name, module_name): def load_extension_class_names(namespace): for ep in entry_points().get(namespace, []): - yield ep.name, ep.value + yield ep.name, ':'.join([ep.module_name, ep.attrs[0]]) def load_extension_classes(namespace): From 386ec958ce2e22aef5ae4834ccc944dc539ed817 Mon Sep 17 00:00:00 2001 From: "Jose R. K" Date: Wed, 6 Apr 2022 11:32:56 -0300 Subject: [PATCH 0069/1051] chore: correct call to worker_main() --- docs/userguide/application.rst | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/userguide/application.rst b/docs/userguide/application.rst index 502353d1013..1ba8cb5aad2 100644 --- a/docs/userguide/application.rst +++ b/docs/userguide/application.rst @@ -81,7 +81,8 @@ with :meth:`@worker_main`: def add(x, y): return x + y if __name__ == '__main__': - app.worker_main() + args = ['worker', '--loglevel=INFO'] + app.worker_main(argv=args) When this module is executed the tasks will be named starting with "``__main__``", but when the module is imported by another process, say to call a task, From 56ed6c622745adb2ffd10c2928f89c71331adb94 Mon Sep 17 00:00:00 2001 From: Thejesh GN Date: Thu, 7 Apr 2022 12:15:31 +0530 Subject: [PATCH 0070/1051] Updated logo url in readme --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index c7376332be6..ed771180f22 100644 --- a/README.rst +++ b/README.rst @@ -1,4 +1,4 @@ -.. image:: http://docs.celeryproject.org/en/latest/_images/celery-banner-small.png +.. 
image:: https://docs.celeryq.dev/en/latest/_images/celery-banner-small.png |build-status| |coverage| |license| |wheel| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge| From e6cde8371121fadc33d18b9425550b0abdb70efc Mon Sep 17 00:00:00 2001 From: pyup-bot Date: Thu, 7 Apr 2022 15:10:27 +0300 Subject: [PATCH 0071/1051] Update sphinx-click from 3.1.0 to 4.0.0 --- requirements/docs.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/docs.txt b/requirements/docs.txt index d83a874a1cb..304779f606c 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -1,7 +1,7 @@ sphinx_celery==2.0.0 Sphinx>=3.0.0 sphinx-testing==1.0.1 -sphinx-click==3.1.0 +sphinx-click==4.0.0 -r extras/sqlalchemy.txt -r test.txt -r deps/mock.txt From b260860988469ef8ad74f2d4225839c2fa91d590 Mon Sep 17 00:00:00 2001 From: Omer Katz Date: Sat, 9 Apr 2022 13:27:58 +0300 Subject: [PATCH 0072/1051] Avoid importing buf_t from billiard's compat module as it was removed. buf_t was a compatibility layer for 2.7, it's no longer needed so it was removed from billiard. We should adjust the code in Celery as well. --- celery/concurrency/asynpool.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/celery/concurrency/asynpool.py b/celery/concurrency/asynpool.py index b9f2875a261..489336936c1 100644 --- a/celery/concurrency/asynpool.py +++ b/celery/concurrency/asynpool.py @@ -26,7 +26,7 @@ from weakref import WeakValueDictionary, ref from billiard import pool as _pool -from billiard.compat import buf_t, isblocking, setblocking +from billiard.compat import isblocking, setblocking from billiard.pool import ACK, NACK, RUN, TERMINATE, WorkersJoined from billiard.queues import _SimpleQueue from kombu.asynchronous import ERR, WRITE @@ -868,7 +868,7 @@ def send_job(tup): header = pack('>I', body_size) # index 1,0 is the job ID. job = get_job(tup[1][0]) - job._payload = buf_t(header), buf_t(body), body_size + job._payload = memoryview(header), memoryview(body), body_size put_message(job) self._quick_put = send_job From 3d395e8e4b276d92b8bbd7ef287de6dc8e0826f2 Mon Sep 17 00:00:00 2001 From: Omer Katz Date: Sun, 10 Apr 2022 15:45:38 +0300 Subject: [PATCH 0073/1051] Avoid negating a constant in a loop. (#7443) * Avoid negating a constant in a loop. 
Since `intermediate` is a constant, we can negate it in advance * Happify linter --- celery/result.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/celery/result.py b/celery/result.py index 637b99735a7..2f468fc60cb 100644 --- a/celery/result.py +++ b/celery/result.py @@ -300,13 +300,15 @@ def get_leaf(self): def iterdeps(self, intermediate=False): stack = deque([(None, self)]) + is_incomplete_stream = not intermediate + while stack: parent, node = stack.popleft() yield parent, node if node.ready(): stack.extend((node, child) for child in node.children or []) else: - if not intermediate: + if is_incomplete_stream: raise IncompleteStream() def ready(self): From 7126fb60378b387eca311ed8fe64d4dc6f487369 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Damjan=20Ku=C5=BEnar?= Date: Mon, 11 Apr 2022 11:36:19 +0200 Subject: [PATCH 0074/1051] Ensure expiration is of float type when migrating tasks (#7385) * Ensure expiration is of float type when migrating tasks * Remove whitespace * Add expiration as keyword argument to publish --- celery/contrib/migrate.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/celery/contrib/migrate.py b/celery/contrib/migrate.py index ec3751e706b..dd77801762f 100644 --- a/celery/contrib/migrate.py +++ b/celery/contrib/migrate.py @@ -63,13 +63,18 @@ def republish(producer, message, exchange=None, routing_key=None, # when the message is recompressed. compression = headers.pop('compression', None) + expiration = props.pop('expiration', None) + # ensure expiration is a float + expiration = float(expiration) if expiration is not None else None + for key in remove_props: props.pop(key, None) producer.publish(ensure_bytes(body), exchange=exchange, routing_key=routing_key, compression=compression, headers=headers, content_type=ctype, - content_encoding=enc, **props) + content_encoding=enc, expiration=expiration, + **props) def migrate_task(producer, body_, message, queues=None): From 34c21fea6bd7de72d25f43706b406eb7438b6760 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 11 Apr 2022 16:52:02 +0000 Subject: [PATCH 0075/1051] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/asottile/pyupgrade: v2.31.1 → v2.32.0](https://github.com/asottile/pyupgrade/compare/v2.31.1...v2.32.0) - [github.com/pre-commit/pre-commit-hooks: v4.1.0 → v4.2.0](https://github.com/pre-commit/pre-commit-hooks/compare/v4.1.0...v4.2.0) --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 28bf910f39b..f667ad0f237 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/asottile/pyupgrade - rev: v2.31.1 + rev: v2.32.0 hooks: - id: pyupgrade args: ["--py37-plus"] @@ -16,7 +16,7 @@ repos: - id: yesqa - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.1.0 + rev: v4.2.0 hooks: - id: check-merge-conflict - id: check-toml From de65e6abbdf4a863bcbbe70912af2b76c6f53d87 Mon Sep 17 00:00:00 2001 From: Marcelo Trylesinski Date: Tue, 12 Apr 2022 15:55:09 +0200 Subject: [PATCH 0076/1051] Annotate `celery/fixups` (#7399) --- celery/fixups/django.py | 70 ++++++++++++++++++++++++----------------- pyproject.toml | 2 +- 2 files changed, 43 insertions(+), 29 deletions(-) diff --git a/celery/fixups/django.py b/celery/fixups/django.py index 
019e695ea2e..59fcb9e26b8 100644 --- a/celery/fixups/django.py +++ b/celery/fixups/django.py @@ -4,6 +4,7 @@ import warnings from datetime import datetime from importlib import import_module +from typing import IO, TYPE_CHECKING, Any, List, Optional, cast from kombu.utils.imports import symbol_by_name from kombu.utils.objects import cached_property @@ -11,6 +12,19 @@ from celery import _state, signals from celery.exceptions import FixupWarning, ImproperlyConfigured +if TYPE_CHECKING: + from types import ModuleType + from typing import Protocol + + from django.db.utils import ConnectionHandler + + from celery.app.base import Celery + from celery.app.task import Task + + class DjangoDBModule(Protocol): + connections: ConnectionHandler + + __all__ = ('DjangoFixup', 'fixup') ERR_NOT_INSTALLED = """\ @@ -19,7 +33,7 @@ """ -def _maybe_close_fd(fh): +def _maybe_close_fd(fh: IO) -> None: try: os.close(fh.fileno()) except (AttributeError, OSError, TypeError): @@ -27,12 +41,12 @@ def _maybe_close_fd(fh): pass -def _verify_django_version(django): +def _verify_django_version(django: "ModuleType") -> None: if django.VERSION < (1, 11): raise ImproperlyConfigured('Celery 5.x requires Django 1.11 or later.') -def fixup(app, env='DJANGO_SETTINGS_MODULE'): +def fixup(app: "Celery", env: str = 'DJANGO_SETTINGS_MODULE') -> Optional["DjangoFixup"]: """Install Django fixup if settings module environment is set.""" SETTINGS_MODULE = os.environ.get(env) if SETTINGS_MODULE and 'django' not in app.loader_cls.lower(): @@ -43,18 +57,19 @@ def fixup(app, env='DJANGO_SETTINGS_MODULE'): else: _verify_django_version(django) return DjangoFixup(app).install() + return None class DjangoFixup: """Fixup installed when using Django.""" - def __init__(self, app): + def __init__(self, app: "Celery"): self.app = app if _state.default_app is None: self.app.set_default() - self._worker_fixup = None + self._worker_fixup: Optional["DjangoWorkerFixup"] = None - def install(self): + def install(self) -> "DjangoFixup": # Need to add project directory to path. # The project directory has precedence over system modules, # so we prepend it to the path. 
@@ -68,41 +83,41 @@ def install(self): return self @property - def worker_fixup(self): + def worker_fixup(self) -> "DjangoWorkerFixup": if self._worker_fixup is None: self._worker_fixup = DjangoWorkerFixup(self.app) return self._worker_fixup @worker_fixup.setter - def worker_fixup(self, value): + def worker_fixup(self, value: "DjangoWorkerFixup") -> None: self._worker_fixup = value - def on_import_modules(self, **kwargs): + def on_import_modules(self, **kwargs: Any) -> None: # call django.setup() before task modules are imported self.worker_fixup.validate_models() - def on_worker_init(self, **kwargs): + def on_worker_init(self, **kwargs: Any) -> None: self.worker_fixup.install() - def now(self, utc=False): + def now(self, utc: bool = False) -> datetime: return datetime.utcnow() if utc else self._now() - def autodiscover_tasks(self): + def autodiscover_tasks(self) -> List[str]: from django.apps import apps return [config.name for config in apps.get_app_configs()] @cached_property - def _now(self): + def _now(self) -> datetime: return symbol_by_name('django.utils.timezone:now') class DjangoWorkerFixup: _db_recycles = 0 - def __init__(self, app): + def __init__(self, app: "Celery") -> None: self.app = app self.db_reuse_max = self.app.conf.get('CELERY_DB_REUSE_MAX', None) - self._db = import_module('django.db') + self._db = cast("DjangoDBModule", import_module('django.db')) self._cache = import_module('django.core.cache') self._settings = symbol_by_name('django.conf:settings') @@ -111,16 +126,16 @@ def __init__(self, app): ) self.DatabaseError = symbol_by_name('django.db:DatabaseError') - def django_setup(self): + def django_setup(self) -> None: import django django.setup() - def validate_models(self): + def validate_models(self) -> None: from django.core.checks import run_checks self.django_setup() run_checks() - def install(self): + def install(self) -> "DjangoWorkerFixup": signals.beat_embedded_init.connect(self.close_database) signals.worker_ready.connect(self.on_worker_ready) signals.task_prerun.connect(self.on_task_prerun) @@ -130,7 +145,7 @@ def install(self): self.close_cache() return self - def on_worker_process_init(self, **kwargs): + def on_worker_process_init(self, **kwargs: Any) -> None: # Child process must validate models again if on Windows, # or if they were started using execv. 
if os.environ.get('FORKED_BY_MULTIPROCESSING'): @@ -152,25 +167,24 @@ def on_worker_process_init(self, **kwargs): self._close_database(force=True) self.close_cache() - def _maybe_close_db_fd(self, fd): + def _maybe_close_db_fd(self, fd: IO) -> None: try: _maybe_close_fd(fd) except self.interface_errors: pass - def on_task_prerun(self, sender, **kwargs): + def on_task_prerun(self, sender: "Task", **kwargs: Any) -> None: """Called before every task.""" if not getattr(sender.request, 'is_eager', False): self.close_database() - def on_task_postrun(self, sender, **kwargs): - # See https://groups.google.com/group/django-users/ - # browse_thread/thread/78200863d0c07c6d/ + def on_task_postrun(self, sender: "Task", **kwargs: Any) -> None: + # See https://groups.google.com/group/django-users/browse_thread/thread/78200863d0c07c6d/ if not getattr(sender.request, 'is_eager', False): self.close_database() self.close_cache() - def close_database(self, **kwargs): + def close_database(self, **kwargs: Any) -> None: if not self.db_reuse_max: return self._close_database() if self._db_recycles >= self.db_reuse_max * 2: @@ -178,7 +192,7 @@ def close_database(self, **kwargs): self._close_database() self._db_recycles += 1 - def _close_database(self, force=False): + def _close_database(self, force: bool = False) -> None: for conn in self._db.connections.all(): try: if force: @@ -192,13 +206,13 @@ def _close_database(self, force=False): if 'closed' not in str_exc and 'not connected' not in str_exc: raise - def close_cache(self): + def close_cache(self) -> None: try: self._cache.close_caches() except (TypeError, AttributeError): pass - def on_worker_ready(self, **kwargs): + def on_worker_ready(self, **kwargs: Any) -> None: if self._settings.DEBUG: warnings.warn('''Using settings.DEBUG leads to a memory leak, never use this setting in production environments!''') diff --git a/pyproject.toml b/pyproject.toml index 72a2bfd3f50..830b1f2683c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,7 +8,6 @@ markers = ["sleepdeprived_patched_module", "masked_modules", "patched_environ", [tool.mypy] warn_unused_configs = true strict = false -warn_return_any = true follow_imports = "skip" show_error_codes = true disallow_untyped_defs = true @@ -17,6 +16,7 @@ files = [ "celery/__main__.py", "celery/states.py", "celery/signals.py", + "celery/fixups", ] [tool.coverage.report] From 2748356823456257472f2a7c41d55cc681e2b3d0 Mon Sep 17 00:00:00 2001 From: Mark Walker Date: Tue, 12 Apr 2022 16:07:02 +0100 Subject: [PATCH 0077/1051] Fix: code block formatting for task deprecation --- docs/internals/deprecation.rst | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/internals/deprecation.rst b/docs/internals/deprecation.rst index 23d03ad36f7..59105ba7ac4 100644 --- a/docs/internals/deprecation.rst +++ b/docs/internals/deprecation.rst @@ -34,7 +34,7 @@ Compat Task Modules from celery import task -- Module ``celery.task`` will be removed +- Module ``celery.task`` will be removed This means you should change: @@ -49,6 +49,7 @@ Compat Task Modules from celery import shared_task -- and: + .. 
code-block:: python From 0ddc929fc8591b2b80bd732eec0c416e062c370e Mon Sep 17 00:00:00 2001 From: pyup-bot Date: Tue, 12 Apr 2022 18:49:24 +0300 Subject: [PATCH 0078/1051] Update sphinx-click from 4.0.0 to 4.0.1 --- requirements/docs.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/docs.txt b/requirements/docs.txt index 304779f606c..715e6fca4f2 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -1,7 +1,7 @@ sphinx_celery==2.0.0 Sphinx>=3.0.0 sphinx-testing==1.0.1 -sphinx-click==4.0.0 +sphinx-click==4.0.1 -r extras/sqlalchemy.txt -r test.txt -r deps/mock.txt From ab3231dea14501c0159d3caa1fcf83689eb6db2d Mon Sep 17 00:00:00 2001 From: Tim Tisdall Date: Wed, 13 Apr 2022 07:57:34 -0400 Subject: [PATCH 0079/1051] celeryproject.org links in github templates (#7442) * update URL to contributing guidelines * fix URL to contributing guidelines --- .github/ISSUE_TEMPLATE/Bug-Report.md | 2 +- .github/PULL_REQUEST_TEMPLATE.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/Bug-Report.md b/.github/ISSUE_TEMPLATE/Bug-Report.md index 25a9be322a1..71f46e30d69 100644 --- a/.github/ISSUE_TEMPLATE/Bug-Report.md +++ b/.github/ISSUE_TEMPLATE/Bug-Report.md @@ -15,7 +15,7 @@ To check an item on the list replace [ ] with [x]. - [ ] I have verified that the issue exists against the `master` branch of Celery. - [ ] This has already been asked to the [discussions forum](https://github.com/celery/celery/discussions) first. - [ ] I have read the relevant section in the - [contribution guide](http://docs.celeryproject.org/en/latest/contributing.html#other-bugs) + [contribution guide](https://docs.celeryq.dev/en/master/contributing.html#other-bugs) on reporting bugs. - [ ] I have checked the [issues list](https://github.com/celery/celery/issues?q=is%3Aissue+label%3A%22Issue+Type%3A+Bug+Report%22+-label%3A%22Category%3A+Documentation%22) for similar or identical bug reports. diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 5429534594c..b9e27ef0915 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -1,5 +1,5 @@ *Note*: Before submitting this pull request, please review our [contributing -guidelines](https://docs.celeryproject.org/en/master/contributing.html). +guidelines](https://docs.celeryq.dev/en/master/contributing.html). ## Description From a7cad184fd96ccaa36ce02a84eb4ba93afa7dec9 Mon Sep 17 00:00:00 2001 From: Simon Mazenoux Date: Thu, 14 Apr 2022 08:55:32 +0200 Subject: [PATCH 0080/1051] fix userguide daemonizing by changing systemd --version to systemctl --version --- docs/userguide/daemonizing.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/userguide/daemonizing.rst b/docs/userguide/daemonizing.rst index c2ea8a57645..322aa33eaaf 100644 --- a/docs/userguide/daemonizing.rst +++ b/docs/userguide/daemonizing.rst @@ -14,9 +14,9 @@ You can check if your Linux distribution uses systemd by typing: .. 
code-block:: console - $ systemd --version - systemd 237 - +PAM +AUDIT +SELINUX +IMA +APPARMOR +SMACK +SYSVINIT +UTMP +LIBCRYPTSETUP +GCRYPT +GNUTLS +ACL +XZ +LZ4 +SECCOMP +BLKID +ELFUTILS +KMOD -IDN2 +IDN -PCRE2 default-hierarchy=hybrid + $ systemctl --version + systemd 249 (v249.9-1.fc35) + +PAM +AUDIT +SELINUX -APPARMOR +IMA +SMACK +SECCOMP +GCRYPT +GNUTLS +OPENSSL +ACL +BLKID +CURL +ELFUTILS +FIDO2 +IDN2 -IDN +IPTC +KMOD +LIBCRYPTSETUP +LIBFDISK +PCRE2 +PWQUALITY +P11KIT +QRENCODE +BZIP2 +LZ4 +XZ +ZLIB +ZSTD +XKBCOMMON +UTMP +SYSVINIT default-hierarchy=unified If you have output similar to the above, please refer to :ref:`our systemd documentation ` for guidance. From 231d3fc0a20786ed38968acf48e182301431d37a Mon Sep 17 00:00:00 2001 From: Omer Katz Date: Thu, 14 Apr 2022 15:02:41 +0300 Subject: [PATCH 0081/1051] Fix typo. --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 830b1f2683c..e4ac5e78960 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -2,7 +2,7 @@ addopts = "--strict-markers" testpaths = "t/unit/" python_classes = "test_*" -xdfail_strict=true +xfail_strict=true markers = ["sleepdeprived_patched_module", "masked_modules", "patched_environ", "patched_module"] [tool.mypy] From 9af72361742fb39c7252f797f3ba6bb3f9c0bb45 Mon Sep 17 00:00:00 2001 From: pyup-bot Date: Thu, 14 Apr 2022 14:46:54 +0300 Subject: [PATCH 0082/1051] Update sphinx-click from 4.0.1 to 4.0.2 --- requirements/docs.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/docs.txt b/requirements/docs.txt index 715e6fca4f2..d1d245930d4 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -1,7 +1,7 @@ sphinx_celery==2.0.0 Sphinx>=3.0.0 sphinx-testing==1.0.1 -sphinx-click==4.0.1 +sphinx-click==4.0.2 -r extras/sqlalchemy.txt -r test.txt -r deps/mock.txt From 55c8ca185f5fe4a156cf59aa01404e123757b981 Mon Sep 17 00:00:00 2001 From: Damian Zaremba Date: Thu, 14 Apr 2022 15:33:10 +0100 Subject: [PATCH 0083/1051] load_extension_class_names - correct module_name (#7406) 95015a changed over to using importlib rather than pkg_resources, unfortunately the object is not exactly the same. 
Attempting to start up a celery instance with `django-celery-results` installed results in an exception during `load_extension_class_names`; ``` During handling of the above exception, another exception occurred: Traceback (most recent call last): File "/usr/lib/python3.10/site-packages/celery/worker/worker.py", line 203, in start self.blueprint.start(self) File "/usr/lib/python3.10/site-packages/celery/bootsteps.py", line 112, in start self.on_start() File "/usr/lib/python3.10/site-packages/celery/apps/worker.py", line 136, in on_start self.emit_banner() File "/usr/lib/python3.10/site-packages/celery/apps/worker.py", line 170, in emit_banner ' \n', self.startup_info(artlines=not use_image))), File "/usr/lib/python3.10/site-packages/celery/apps/worker.py", line 232, in startup_info results=self.app.backend.as_uri(), File "/usr/lib/python3.10/site-packages/celery/app/base.py", line 1252, in backend self._local.backend = new_backend = self._get_backend() File "/usr/lib/python3.10/site-packages/celery/app/base.py", line 955, in _get_backend backend, url = backends.by_url( File "/usr/lib/python3.10/site-packages/celery/app/backends.py", line 69, in by_url return by_name(backend, loader), url File "/usr/lib/python3.10/site-packages/celery/app/backends.py", line 47, in by_name aliases.update(load_extension_class_names(extension_namespace)) File "/usr/lib/python3.10/site-packages/celery/utils/imports.py", line 146, in load_extension_class_names yield ep.name, ':'.join([ep.module_name, ep.attrs[0]]) AttributeError: 'EntryPoint' object has no attribute 'module_name' ``` Move over to using the direct value should resolve this issue; ``` >>> from pkg_resources import iter_entry_points >>> list(iter_entry_points('celery.result_backends'))[0].__dict__ {'name': 'django-cache', 'module_name': 'django_celery_results.backends', 'attrs': ('CacheBackend',), 'extras': (), 'dist': django-celery-results 2.3.0 (/usr/lib/python3.10/site-packages)} ``` vs ``` >>> from importlib.metadata import entry_points >>> entry_points().get('celery.result_backends')[0] EntryPoint(name='django-cache', value='django_celery_results.backends:CacheBackend', group='celery.result_backends') ``` --- celery/utils/imports.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/celery/utils/imports.py b/celery/utils/imports.py index 9e841c6e2ea..6fcdf2e0e17 100644 --- a/celery/utils/imports.py +++ b/celery/utils/imports.py @@ -143,7 +143,7 @@ def gen_task_name(app, name, module_name): def load_extension_class_names(namespace): for ep in entry_points().get(namespace, []): - yield ep.name, ':'.join([ep.module_name, ep.attrs[0]]) + yield ep.name, ep.value def load_extension_classes(namespace): From 9e324caaa6b175d8e51d3582378b78757e66a12d Mon Sep 17 00:00:00 2001 From: dobosevych Date: Thu, 14 Apr 2022 18:22:33 +0300 Subject: [PATCH 0084/1051] Integration test fix (#7460) * Integration debugging * Integration debugging * Integration debugging * Commented tasks that aren't working * Fixed test_inspect.py * Fixed serialization test_canvas.py * Request fixes * Setup full pipeline * Setup full pipeline * Setup full pipeline * Setup python-package.yml * Setup python-package.yml * Added 3.10 to integration tests * test_task.py fixed * test_generator fixed * Added parametrization to test_generation * fixed test_generator * Reverted encoding in test_canvas.py * Rollback codecov * Retries now respect additional options. Previously, expires and other options were not merged with the current task's options. This commit fixes the issue. 
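A minimal sketch of the new retry-option merge semantics (the dictionaries
below are made-up stand-ins for `request.as_execution_options()` and the
options passed to `retry()`, not the actual Celery internals):

```
request_options = {'expires': 60.0, 'priority': 5}  # stand-in for request.as_execution_options()
extra_options = {'expires': 120.0}                  # e.g. passed explicitly to retry()

# The fix merges both, letting the explicitly passed options win:
options = {**request_options, **extra_options}
assert options == {'expires': 120.0, 'priority': 5}
```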
Co-authored-by: Omer Katz --- .github/workflows/python-package.yml | 46 +++++++++++++++++++++++++++- celery/app/task.py | 2 +- celery/canvas.py | 13 +++++--- celery/contrib/pytest.py | 2 +- celery/worker/request.py | 2 +- requirements/test-integration.txt | 1 + t/integration/tasks.py | 7 +++-- t/integration/test_canvas.py | 19 ++++++------ t/integration/test_tasks.py | 11 +++++-- 9 files changed, 79 insertions(+), 24 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index bb2ed26d003..e49116c95db 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -62,7 +62,7 @@ jobs: - name: > Run tox for "${{ matrix.python-version }}-unit" - timeout-minutes: 20 + timeout-minutes: 25 run: | tox --verbose --verbose @@ -72,3 +72,47 @@ jobs: fail_ci_if_error: true # optional (default = false) verbose: true # optional (default = false) + Integration: + needs: + - Unit + if: needs.Unit.result == 'success' + timeout-minutes: 240 + + runs-on: ubuntu-20.04 + strategy: + fail-fast: false + matrix: + python-version: ['3.7', '3.8', '3.9', '3.10'] + toxenv: ['redis'] + services: + redis: + image: redis + ports: + - 6379:6379 + env: + REDIS_HOST: localhost + REDIS_PORT: 6379 + + steps: + - name: Install apt packages + run: | + sudo apt-get install -f libcurl4-openssl-dev libssl-dev gnutls-dev httping expect libmemcached-dev + - uses: actions/checkout@v2 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + + - name: Get pip cache dir + id: pip-cache + run: | + echo "::set-output name=dir::$(pip cache dir)" + - name: Install tox + run: python -m pip install tox + - name: > + Run tox for + "${{ matrix.python-version }}-integration-${{ matrix.toxenv }}" + timeout-minutes: 25 + run: > + tox --verbose --verbose -e + "${{ matrix.python-version }}-integration-${{ matrix.toxenv }}" -vv diff --git a/celery/app/task.py b/celery/app/task.py index de25715fc55..db47ab202f6 100644 --- a/celery/app/task.py +++ b/celery/app/task.py @@ -604,7 +604,7 @@ def signature_from_request(self, request=None, args=None, kwargs=None, request = self.request if request is None else request args = request.args if args is None else args kwargs = request.kwargs if kwargs is None else kwargs - options = request.as_execution_options() + options = {**request.as_execution_options(), **extra_options} delivery_info = request.delivery_info or {} priority = delivery_info.get('priority') if priority is not None: diff --git a/celery/canvas.py b/celery/canvas.py index a013ba4e9ed..3d92a4e0f55 100644 --- a/celery/canvas.py +++ b/celery/canvas.py @@ -26,7 +26,7 @@ from celery.utils.collections import ChainMap from celery.utils.functional import _regen from celery.utils.functional import chunks as _chunks -from celery.utils.functional import is_list, lookahead, maybe_list, regen, seq_concat_item, seq_concat_seq +from celery.utils.functional import is_list, maybe_list, regen, seq_concat_item, seq_concat_seq from celery.utils.objects import getitem_property from celery.utils.text import remove_repeating_from_task, truncate @@ -1183,9 +1183,11 @@ def _apply_tasks(self, tasks, producer=None, app=None, p=None, # next_task is None. This enables us to set the chord size # without burning through the entire generator. See #3021. 
chord_size = 0 - for task_index, (current_task, next_task) in enumerate( - lookahead(tasks) - ): + tasks_shifted, tasks = itertools.tee(tasks) + next(tasks_shifted, None) + next_task = next(tasks_shifted, None) + + for task_index, current_task in enumerate(tasks): # We expect that each task must be part of the same group which # seems sensible enough. If that's somehow not the case we'll # end up messing up chord counts and there are all sorts of @@ -1211,6 +1213,7 @@ def _apply_tasks(self, tasks, producer=None, app=None, p=None, if p and not p.cancelled and not p.ready: p.size += 1 res.then(p, weak=True) + next_task = next(tasks_shifted, None) yield res # <-- r.parent, etc set in the frozen result. def _freeze_gid(self, options): @@ -1248,7 +1251,7 @@ def _freeze_group_tasks(self, _id=None, group_id=None, chord=None, # we freeze all tasks in the clone tasks1, and then zip the results # with the IDs of tasks in the second clone, tasks2. and then, we build # a generator that takes only the task IDs from tasks2. - self.tasks = regen(x[0] for x in zip(tasks2, results)) + self.tasks = regen(tasks2) else: new_tasks = [] # Need to unroll subgroups early so that chord gets the diff --git a/celery/contrib/pytest.py b/celery/contrib/pytest.py index 858e4e5c447..fae69fc5368 100644 --- a/celery/contrib/pytest.py +++ b/celery/contrib/pytest.py @@ -98,7 +98,7 @@ def celery_session_worker( for module in celery_includes: celery_session_app.loader.import_task_module(module) for class_task in celery_class_tasks: - celery_session_app.tasks.register(class_task) + celery_session_app.register_task(class_task) with worker.start_worker(celery_session_app, pool=celery_worker_pool, **celery_worker_parameters) as w: diff --git a/celery/worker/request.py b/celery/worker/request.py index b9fcb14bc67..4e4ae803ca6 100644 --- a/celery/worker/request.py +++ b/celery/worker/request.py @@ -154,7 +154,7 @@ def __init__(self, message, on_ack=noop, 'exchange': delivery_info.get('exchange'), 'routing_key': delivery_info.get('routing_key'), 'priority': properties.get('priority'), - 'redelivered': delivery_info.get('redelivered'), + 'redelivered': delivery_info.get('redelivered', False), } self._request_dict.update({ 'properties': properties, diff --git a/requirements/test-integration.txt b/requirements/test-integration.txt index ab2958d21ff..545143cf174 100644 --- a/requirements/test-integration.txt +++ b/requirements/test-integration.txt @@ -3,3 +3,4 @@ -r extras/auth.txt -r extras/memcache.txt pytest-rerunfailures>=6.0 +git+https://github.com/celery/kombu.git diff --git a/t/integration/tasks.py b/t/integration/tasks.py index c8edb01d977..1e2b8047bd7 100644 --- a/t/integration/tasks.py +++ b/t/integration/tasks.py @@ -197,16 +197,17 @@ def retry(self, return_value=None): raise self.retry(exc=ExpectedException(), countdown=5) -@shared_task(bind=True, expires=60.0, max_retries=1) -def retry_once(self, *args, expires=60.0, max_retries=1, countdown=0.1): +@shared_task(bind=True, expires=120.0, max_retries=1) +def retry_once(self, *args, expires=None, max_retries=1, countdown=0.1): """Task that fails and is retried. Returns the number of retries.""" if self.request.retries: return self.request.retries raise self.retry(countdown=countdown, + expires=expires, max_retries=max_retries) -@shared_task(bind=True, expires=60.0, max_retries=1) +@shared_task(bind=True, max_retries=1) def retry_once_priority(self, *args, expires=60.0, max_retries=1, countdown=0.1): """Task that fails and is retried. 
Returns the priority.""" diff --git a/t/integration/test_canvas.py b/t/integration/test_canvas.py index e73c0edb172..a5d4a46f0df 100644 --- a/t/integration/test_canvas.py +++ b/t/integration/test_canvas.py @@ -120,7 +120,7 @@ def test_link_error_callback_error_callback_retries_eager(self): ) assert result.get(timeout=TIMEOUT, propagate=False) == exception - @flaky + @pytest.mark.xfail(raises=TimeoutError, reason="Task is timeout instead of returning exception") def test_link_error_callback_retries(self): exception = ExpectedException("Task expected to fail", "test") result = fail.apply_async( @@ -140,7 +140,7 @@ def test_link_error_using_signature_eager(self): assert (fail.apply().get(timeout=TIMEOUT, propagate=False), True) == ( exception, True) - @flaky + @pytest.mark.xfail(raises=TimeoutError, reason="Task is timeout instead of returning exception") def test_link_error_using_signature(self): fail = signature('t.integration.tasks.fail', args=("test",)) retrun_exception = signature('t.integration.tasks.return_exception') @@ -175,7 +175,7 @@ def test_complex_chain(self, manager): res = c() assert res.get(timeout=TIMEOUT) == [64, 65, 66, 67] - @flaky + @pytest.mark.xfail(raises=TimeoutError, reason="Task is timeout") def test_group_results_in_chain(self, manager): # This adds in an explicit test for the special case added in commit # 1e3fcaa969de6ad32b52a3ed8e74281e5e5360e6 @@ -473,7 +473,7 @@ def test_chain_of_a_chord_and_three_tasks_and_a_group(self, manager): res = c() assert res.get(timeout=TIMEOUT) == [8, 8] - @flaky + @pytest.mark.xfail(raises=TimeoutError, reason="Task is timeout") def test_nested_chain_group_lone(self, manager): """ Test that a lone group in a chain completes. @@ -1229,7 +1229,7 @@ def apply_chord_incr_with_sleep(self, *args, **kwargs): result = c() assert result.get(timeout=TIMEOUT) == 4 - @flaky + @pytest.mark.xfail(reason="async_results aren't performed in async way") def test_redis_subscribed_channels_leak(self, manager): if not manager.app.conf.result_backend.startswith('redis'): raise pytest.skip('Requires redis result backend.') @@ -1562,11 +1562,12 @@ def test_chord_on_error(self, manager): ) == 1 @flaky - def test_generator(self, manager): + @pytest.mark.parametrize('size', [3, 4, 5, 6, 7, 8, 9]) + def test_generator(self, manager, size): def assert_generator(file_name): - for i in range(3): + for i in range(size): sleep(1) - if i == 2: + if i == size - 1: with open(file_name) as file_handle: # ensures chord header generators tasks are processed incrementally #3021 assert file_handle.readline() == '0\n', "Chord header was unrolled too early" @@ -1575,7 +1576,7 @@ def assert_generator(file_name): with tempfile.NamedTemporaryFile(mode='w', delete=False) as tmp_file: file_name = tmp_file.name c = chord(assert_generator(file_name), tsum.s()) - assert c().get(timeout=TIMEOUT) == 3 + assert c().get(timeout=TIMEOUT) == size * (size - 1) // 2 @flaky def test_parallel_chords(self, manager): diff --git a/t/integration/test_tasks.py b/t/integration/test_tasks.py index cc2c6761b7d..c4289d4e09c 100644 --- a/t/integration/test_tasks.py +++ b/t/integration/test_tasks.py @@ -27,7 +27,7 @@ class test_class_based_tasks: def test_class_based_task_retried(self, celery_session_app, celery_session_worker): task = ClassBasedAutoRetryTask() - celery_session_app.tasks.register(task) + celery_session_app.register_task(task) res = task.delay() assert res.get(timeout=TIMEOUT) == 1 @@ -253,12 +253,17 @@ def test_task_accepted(self, manager, sleep=1): manager.assert_accepted([r1.id]) 
@flaky - def test_task_retried(self): + def test_task_retried_once(self, manager): res = retry_once.delay() assert res.get(timeout=TIMEOUT) == 1 # retried once @flaky - def test_task_retried_priority(self): + def test_task_retried_once_with_expires(self, manager): + res = retry_once.delay(expires=60) + assert res.get(timeout=TIMEOUT) == 1 # retried once + + @flaky + def test_task_retried_priority(self, manager): res = retry_once_priority.apply_async(priority=7) assert res.get(timeout=TIMEOUT) == 7 # retried once with priority 7 From 1a2db701873f748a440478e14993b83722790598 Mon Sep 17 00:00:00 2001 From: Mark Walker Date: Fri, 15 Apr 2022 23:55:52 +0100 Subject: [PATCH 0085/1051] docs: Move task sidebar blocks into main column [#7449] --- docs/userguide/tasks.rst | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/docs/userguide/tasks.rst b/docs/userguide/tasks.rst index 2a86a5fe3b5..a1c7eae9603 100644 --- a/docs/userguide/tasks.rst +++ b/docs/userguide/tasks.rst @@ -113,7 +113,8 @@ these can be specified as arguments to the decorator: User.objects.create(username=username, password=password) -.. sidebar:: How do I import the task decorator? And what's "app"? +How do I import the task decorator? +----------------------------------- The task decorator is available on your :class:`@Celery` application instance, if you don't know what this is then please read :ref:`first-steps`. @@ -129,7 +130,8 @@ these can be specified as arguments to the decorator: def add(x, y): return x + y -.. sidebar:: Multiple decorators +Multiple decorators +------------------- When using multiple decorators in combination with the task decorator you must make sure that the `task` From 67c0dd0b8e00779f9c16e533cc2d50932379fc45 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Mon, 18 Apr 2022 18:04:32 +0600 Subject: [PATCH 0086/1051] try pymongo[srv]>=4.0.2 --- requirements/extras/mongodb.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/mongodb.txt b/requirements/extras/mongodb.txt index b2264dfbbe2..899879c628d 100644 --- a/requirements/extras/mongodb.txt +++ b/requirements/extras/mongodb.txt @@ -1 +1 @@ -pymongo[srv]>=3.11.1 +pymongo[srv]>=4.0.2 From ae20aa9d066899f9b394ec801ba70439fc6db0c1 Mon Sep 17 00:00:00 2001 From: Omer Katz Date: Mon, 18 Apr 2022 19:15:05 +0300 Subject: [PATCH 0087/1051] Ensure task compression actually happens when setting `task_compression` (#7470) * Ensure task compression actually happens when setting `task_compression`. Fixes #4838. Previously, we erroneously used `result_compression` as the configuration option for this behavior. It appears that compressing results was never supported in Celery or that the support for it was removed. This will be fixed later on. * Happify the linter. 
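A hedged configuration sketch of the corrected compression behaviour (the
app name, broker URL and task below are placeholders, not part of this
patch):

```
from celery import Celery

app = Celery('example', broker='memory://')

# With this fix, task_compression (not result_compression) controls
# compression of outgoing task messages.
app.conf.task_compression = 'gzip'

@app.task
def add(x, y):
    return x + y

# A per-call compression argument still overrides the setting:
# add.apply_async((2, 2), compression='bz2')
```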
Co-authored-by: Omer Katz --- celery/app/amqp.py | 2 +- t/unit/app/test_amqp.py | 29 +++++++++++++++++++++++++---- 2 files changed, 26 insertions(+), 5 deletions(-) diff --git a/celery/app/amqp.py b/celery/app/amqp.py index 777a1fc2c7c..06ce1d1b3c6 100644 --- a/celery/app/amqp.py +++ b/celery/app/amqp.py @@ -447,7 +447,7 @@ def _create_task_sender(self): default_rkey = self.app.conf.task_default_routing_key default_serializer = self.app.conf.task_serializer - default_compressor = self.app.conf.result_compression + default_compressor = self.app.conf.task_compression def send_task_message(producer, name, message, exchange=None, routing_key=None, queue=None, diff --git a/t/unit/app/test_amqp.py b/t/unit/app/test_amqp.py index bc2d26d3680..1010c4c64ce 100644 --- a/t/unit/app/test_amqp.py +++ b/t/unit/app/test_amqp.py @@ -205,8 +205,7 @@ def test_as_task_message_without_utc(self): self.app.amqp.as_task_v1(uuid(), 'foo', countdown=30, expires=40) -class test_AMQP: - +class test_AMQP_Base: def setup(self): self.simple_message = self.app.amqp.as_task_v2( uuid(), 'foo', create_sent_event=True, @@ -215,6 +214,9 @@ def setup(self): uuid(), 'foo', create_sent_event=False, ) + +class test_AMQP(test_AMQP_Base): + def test_kwargs_must_be_mapping(self): with pytest.raises(TypeError): self.app.amqp.as_task_v2(uuid(), 'foo', kwargs=[1, 2]) @@ -336,7 +338,7 @@ def update_conf_runtime_for_tasks_queues(self): assert router != router_was -class test_as_task_v2: +class test_as_task_v2(test_AMQP_Base): def test_raises_if_args_is_not_tuple(self): with pytest.raises(TypeError): @@ -368,8 +370,27 @@ def test_eta_to_datetime(self): ) assert m.headers['eta'] == eta.isoformat() - def test_callbacks_errbacks_chord(self): + def test_compression(self): + self.app.conf.task_compression = 'gzip' + + prod = Mock(name='producer') + self.app.amqp.send_task_message( + prod, 'foo', self.simple_message_no_sent_event, + compression=None + ) + assert prod.publish.call_args[1]['compression'] == 'gzip' + + def test_compression_override(self): + self.app.conf.task_compression = 'gzip' + + prod = Mock(name='producer') + self.app.amqp.send_task_message( + prod, 'foo', self.simple_message_no_sent_event, + compression='bz2' + ) + assert prod.publish.call_args[1]['compression'] == 'bz2' + def test_callbacks_errbacks_chord(self): @self.app.task def t(i): pass From 3f232e6f741359154e323a92c6c43f7c64f0fe8e Mon Sep 17 00:00:00 2001 From: dobosevych Date: Tue, 19 Apr 2022 12:02:06 +0300 Subject: [PATCH 0088/1051] Rabbitmq CI integration (#7472) * RabbitMQ CI * RabbitMQ first run * RabbitMQ first run * RabbitMQ first run * RabbitMQ + Redis test * All RabbitMQ tests * All RabbitMQ tests * fail_ci_if_error uncommented --- .github/workflows/python-package.yml | 12 +++++++++-- t/integration/test_canvas.py | 32 ++++++++++++++++++++++------ t/integration/test_tasks.py | 2 +- 3 files changed, 36 insertions(+), 10 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index e49116c95db..ff5ba06ed39 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -83,7 +83,8 @@ jobs: fail-fast: false matrix: python-version: ['3.7', '3.8', '3.9', '3.10'] - toxenv: ['redis'] + toxenv: ['redis', 'rabbitmq'] + services: redis: image: redis @@ -92,6 +93,13 @@ jobs: env: REDIS_HOST: localhost REDIS_PORT: 6379 + rabbitmq: + image: rabbitmq + ports: + - 5672:5672 + env: + RABBITMQ_DEFAULT_USER: guest + RABBITMQ_DEFAULT_PASS: guest steps: - name: Install apt packages @@ -112,7 +120,7 @@ 
jobs: - name: > Run tox for "${{ matrix.python-version }}-integration-${{ matrix.toxenv }}" - timeout-minutes: 25 + timeout-minutes: 50 run: > tox --verbose --verbose -e "${{ matrix.python-version }}-integration-${{ matrix.toxenv }}" -vv diff --git a/t/integration/test_canvas.py b/t/integration/test_canvas.py index a5d4a46f0df..6de4c3b766c 100644 --- a/t/integration/test_canvas.py +++ b/t/integration/test_canvas.py @@ -727,24 +727,30 @@ def test_chain_child_with_errback_replaced(self, manager, subtests): await_redis_count(1, redis_key=redis_key) redis_connection.delete(redis_key) - def test_task_replaced_with_chain(self): + @pytest.mark.xfail(raises=TimeoutError, + reason="Task is timeout instead of returning exception on rpc backend", + strict=False) + def test_task_replaced_with_chain(self, manager): orig_sig = replace_with_chain.si(42) res_obj = orig_sig.delay() assert res_obj.get(timeout=TIMEOUT) == 42 - def test_chain_child_replaced_with_chain_first(self): + def test_chain_child_replaced_with_chain_first(self, manager): orig_sig = chain(replace_with_chain.si(42), identity.s()) res_obj = orig_sig.delay() assert res_obj.get(timeout=TIMEOUT) == 42 - def test_chain_child_replaced_with_chain_middle(self): + def test_chain_child_replaced_with_chain_middle(self, manager): orig_sig = chain( identity.s(42), replace_with_chain.s(), identity.s() ) res_obj = orig_sig.delay() assert res_obj.get(timeout=TIMEOUT) == 42 - def test_chain_child_replaced_with_chain_last(self): + @pytest.mark.xfail(raises=TimeoutError, + reason="Task is timeout instead of returning exception on rpc backend", + strict=False) + def test_chain_child_replaced_with_chain_last(self, manager): orig_sig = chain(identity.s(42), replace_with_chain.s()) res_obj = orig_sig.delay() assert res_obj.get(timeout=TIMEOUT) == 42 @@ -1171,19 +1177,28 @@ def test_group_child_with_errback_replaced(self, manager, subtests): await_redis_count(1, redis_key=redis_key) redis_connection.delete(redis_key) - def test_group_child_replaced_with_chain_first(self): + @pytest.mark.xfail(raises=TimeoutError, + reason="Task is timeout instead of returning exception on rpc backend", + strict=False) + def test_group_child_replaced_with_chain_first(self, manager): orig_sig = group(replace_with_chain.si(42), identity.s(1337)) res_obj = orig_sig.delay() assert res_obj.get(timeout=TIMEOUT) == [42, 1337] - def test_group_child_replaced_with_chain_middle(self): + @pytest.mark.xfail(raises=TimeoutError, + reason="Task is timeout instead of returning exception on rpc backend", + strict=False) + def test_group_child_replaced_with_chain_middle(self, manager): orig_sig = group( identity.s(42), replace_with_chain.s(1337), identity.s(31337) ) res_obj = orig_sig.delay() assert res_obj.get(timeout=TIMEOUT) == [42, 1337, 31337] - def test_group_child_replaced_with_chain_last(self): + @pytest.mark.xfail(raises=TimeoutError, + reason="Task is timeout instead of returning exception on rpc backend", + strict=False) + def test_group_child_replaced_with_chain_last(self, manager): orig_sig = group(identity.s(42), replace_with_chain.s(1337)) res_obj = orig_sig.delay() assert res_obj.get(timeout=TIMEOUT) == [42, 1337] @@ -1564,6 +1579,9 @@ def test_chord_on_error(self, manager): @flaky @pytest.mark.parametrize('size', [3, 4, 5, 6, 7, 8, 9]) def test_generator(self, manager, size): + if not manager.app.conf.result_backend.startswith('redis'): + raise pytest.skip('Requires redis result backend.') + def assert_generator(file_name): for i in range(size): sleep(1) diff --git 
a/t/integration/test_tasks.py b/t/integration/test_tasks.py index c4289d4e09c..a7ee94ee40d 100644 --- a/t/integration/test_tasks.py +++ b/t/integration/test_tasks.py @@ -220,7 +220,7 @@ def test_wrong_arguments(self, manager): result.get(timeout=5) assert result.status == 'FAILURE' - @flaky + @pytest.mark.xfail(reason="Retry failed on rpc backend", strict=False) def test_retry(self, manager): """Tests retrying of task.""" # Tests when max. retries is reached From 0a3487b882398ef44d3be34a57b8354e986511af Mon Sep 17 00:00:00 2001 From: "pyup.io bot" Date: Tue, 19 Apr 2022 23:05:26 -0700 Subject: [PATCH 0089/1051] Update sphinx-click to 4.0.3 (#7473) * Update sphinx-click from 4.0.2 to 4.0.3 * sphinx-click~=4.0.3 Co-authored-by: Asif Saif Uddin --- requirements/docs.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/docs.txt b/requirements/docs.txt index d1d245930d4..9a268ca733b 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -1,7 +1,7 @@ -sphinx_celery==2.0.0 +sphinx_celery~=2.0.0 Sphinx>=3.0.0 -sphinx-testing==1.0.1 -sphinx-click==4.0.2 +sphinx-testing~=1.0.1 +sphinx-click~=4.0.3 -r extras/sqlalchemy.txt -r test.txt -r deps/mock.txt From 6484ea0a4d064c172eb5b3fe09ac7b4cc5ad7862 Mon Sep 17 00:00:00 2001 From: Sam Gross Date: Thu, 21 Apr 2022 12:25:04 -0400 Subject: [PATCH 0090/1051] Use inspect.getgeneratorstate in asynpool.gen_not_started (#7476) * Use inspect.getgeneratorstate in asynpool.gen_not_started This improves compatibility with the nogil Python fork, which does not have the gi_frame attribute on generators. * Add additional tests for gen_not_started Checks that gen_not_started is not true while the generator is running and after the generator has exited due to an exception. --- celery/concurrency/asynpool.py | 4 ++-- t/unit/concurrency/test_prefork.py | 12 ++++++++++++ 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/celery/concurrency/asynpool.py b/celery/concurrency/asynpool.py index 489336936c1..28a1e09b80c 100644 --- a/celery/concurrency/asynpool.py +++ b/celery/concurrency/asynpool.py @@ -14,6 +14,7 @@ """ import errno import gc +import inspect import os import select import time @@ -89,8 +90,7 @@ def unpack_from(fmt, iobuf, unpack=unpack): # noqa def gen_not_started(gen): """Return true if generator is not started.""" - # gi_frame is None when generator stopped. 
- return gen.gi_frame and gen.gi_frame.f_lasti == -1 + return inspect.getgeneratorstate(gen) == "GEN_CREATED" def _get_job_writer(job): diff --git a/t/unit/concurrency/test_prefork.py b/t/unit/concurrency/test_prefork.py index 10ed121278e..194dec78aea 100644 --- a/t/unit/concurrency/test_prefork.py +++ b/t/unit/concurrency/test_prefork.py @@ -201,6 +201,7 @@ def test_gen_not_started(self): def gen(): yield 1 + assert not asynpool.gen_not_started(g) yield 2 g = gen() assert asynpool.gen_not_started(g) @@ -209,6 +210,17 @@ def gen(): list(g) assert not asynpool.gen_not_started(g) + def gen2(): + yield 1 + raise RuntimeError('generator error') + g = gen2() + assert asynpool.gen_not_started(g) + next(g) + assert not asynpool.gen_not_started(g) + with pytest.raises(RuntimeError): + next(g) + assert not asynpool.gen_not_started(g) + @patch('select.select', create=True) def test_select(self, __select): ebadf = socket.error() From 8d35c655d6ac408023da5e30ca81bc834e68bca0 Mon Sep 17 00:00:00 2001 From: Stefano Lottini Date: Thu, 21 Apr 2022 19:39:48 +0200 Subject: [PATCH 0091/1051] Extend cassandra to cover AstraDB as well (#7356) * Cassandra backend: bumped driver to v3.24 to support Astra DB, adapted the backend code to that effect, introduced new setting `cassandra_secure_bundle_path` and updated the documentation to reflect this. * edits to docs - configuration for cassandra * Update requirements/extras/cassandra.txt Co-authored-by: Asif Saif Uddin * Update README.rst Co-authored-by: Omer Katz * Cassandra backend for Astra: more test coverage, more docs, driver version bumped Co-authored-by: Stefano Lottini Co-authored-by: Asif Saif Uddin Co-authored-by: Omer Katz --- CONTRIBUTING.rst | 2 +- README.rst | 2 +- celery/app/defaults.py | 1 + celery/backends/cassandra.py | 42 ++++++++--- docs/includes/installation.txt | 2 +- docs/userguide/configuration.rst | 111 +++++++++++++++++++++++++++--- requirements/extras/cassandra.txt | 2 +- t/unit/backends/test_cassandra.py | 39 ++++++++++- 8 files changed, 179 insertions(+), 22 deletions(-) diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index c96ee55fb1e..46424cf8571 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -1170,7 +1170,7 @@ that require third-party libraries must be added. .. code-block:: console - $ pip install -U requirements/pkgutils.txt + $ pip install -U -r requirements/pkgutils.txt $ make readme diff --git a/README.rst b/README.rst index ed771180f22..7fbea1028fb 100644 --- a/README.rst +++ b/README.rst @@ -307,7 +307,7 @@ Transports and Backends for using Memcached as a result backend (pure-Python implementation). :``celery[cassandra]``: - for using Apache Cassandra as a result backend with DataStax driver. + for using Apache Cassandra/Astra DB as a result backend with the DataStax driver. 
:``celery[azureblockblob]``: for using Azure Storage as a result backend (using ``azure-storage``) diff --git a/celery/app/defaults.py b/celery/app/defaults.py index 102302f66cc..29ce4ee77f6 100644 --- a/celery/app/defaults.py +++ b/celery/app/defaults.py @@ -114,6 +114,7 @@ def __repr__(self): port=Option(type='string'), read_consistency=Option(type='string'), servers=Option(type='list'), + bundle_path=Option(type='string'), table=Option(type='string'), write_consistency=Option(type='string'), auth_provider=Option(type='string'), diff --git a/celery/backends/cassandra.py b/celery/backends/cassandra.py index bf4f69c2753..c80aa5ca040 100644 --- a/celery/backends/cassandra.py +++ b/celery/backends/cassandra.py @@ -30,6 +30,10 @@ See https://datastax.github.io/python-driver/api/cassandra/auth.html. """ +E_CASSANDRA_MISCONFIGURED = 'Cassandra backend improperly configured.' + +E_CASSANDRA_NOT_CONFIGURED = 'Cassandra backend not configured.' + Q_INSERT_RESULT = """ INSERT INTO {table} ( task_id, status, result, date_done, traceback, children) VALUES ( @@ -65,21 +69,24 @@ def buf_t(x): class CassandraBackend(BaseBackend): - """Cassandra backend utilizing DataStax driver. + """Cassandra/AstraDB backend utilizing DataStax driver. Raises: celery.exceptions.ImproperlyConfigured: if module :pypi:`cassandra-driver` is not available, - or if the :setting:`cassandra_servers` setting is not set. + or not-exactly-one of the :setting:`cassandra_servers` and + the :setting:`cassandra_secure_bundle_path` settings is set. """ #: List of Cassandra servers with format: ``hostname``. servers = None + #: Location of the secure connect bundle zipfile (absolute path). + bundle_path = None supports_autoexpire = True # autoexpire supported via entry_ttl def __init__(self, servers=None, keyspace=None, table=None, entry_ttl=None, - port=9042, **kwargs): + port=9042, bundle_path=None, **kwargs): super().__init__(**kwargs) if not cassandra: @@ -87,13 +94,20 @@ def __init__(self, servers=None, keyspace=None, table=None, entry_ttl=None, conf = self.app.conf self.servers = servers or conf.get('cassandra_servers', None) + self.bundle_path = bundle_path or conf.get( + 'cassandra_secure_bundle_path', None) self.port = port or conf.get('cassandra_port', None) self.keyspace = keyspace or conf.get('cassandra_keyspace', None) self.table = table or conf.get('cassandra_table', None) self.cassandra_options = conf.get('cassandra_options', {}) - if not self.servers or not self.keyspace or not self.table: - raise ImproperlyConfigured('Cassandra backend not configured.') + # either servers or bundle path must be provided... 
+ db_directions = self.servers or self.bundle_path + if not db_directions or not self.keyspace or not self.table: + raise ImproperlyConfigured(E_CASSANDRA_NOT_CONFIGURED) + # ...but not both: + if self.servers and self.bundle_path: + raise ImproperlyConfigured(E_CASSANDRA_MISCONFIGURED) expires = entry_ttl or conf.get('cassandra_entry_ttl', None) @@ -137,10 +151,20 @@ def _get_connection(self, write=False): try: if self._session is not None: return - self._cluster = cassandra.cluster.Cluster( - self.servers, port=self.port, - auth_provider=self.auth_provider, - **self.cassandra_options) + # using either 'servers' or 'bundle_path' here: + if self.servers: + self._cluster = cassandra.cluster.Cluster( + self.servers, port=self.port, + auth_provider=self.auth_provider, + **self.cassandra_options) + else: + # 'bundle_path' is guaranteed to be set + self._cluster = cassandra.cluster.Cluster( + cloud={ + 'secure_connect_bundle': self.bundle_path, + }, + auth_provider=self.auth_provider, + **self.cassandra_options) self._session = self._cluster.connect(self.keyspace) # We're forced to do concatenation below, as formatting would diff --git a/docs/includes/installation.txt b/docs/includes/installation.txt index 09887edbf0d..415d8933bfe 100644 --- a/docs/includes/installation.txt +++ b/docs/includes/installation.txt @@ -77,7 +77,7 @@ Transports and Backends for using Memcached as a result backend (pure-Python implementation). :``celery[cassandra]``: - for using Apache Cassandra as a result backend with DataStax driver. + for using Apache Cassandra/Astra DB as a result backend with DataStax driver. :``celery[couchbase]``: for using Couchbase as a result backend. diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst index 50dbf4d9394..81481aa3c88 100644 --- a/docs/userguide/configuration.rst +++ b/docs/userguide/configuration.rst @@ -1314,13 +1314,19 @@ used by the redis result backend. .. _conf-cassandra-result-backend: -Cassandra backend settings --------------------------- +Cassandra/AstraDB backend settings +---------------------------------- .. note:: This Cassandra backend driver requires :pypi:`cassandra-driver`. + This backend can refer to either a regular Cassandra installation + or a managed Astra DB instance. Depending on which one, exactly one + between the :setting:`cassandra_servers` and + :setting:`cassandra_secure_bundle_path` settings must be provided + (but not both). + To install, use :command:`pip`: .. code-block:: console @@ -1339,10 +1345,32 @@ This backend requires the following configuration directives to be set. Default: ``[]`` (empty list). -List of ``host`` Cassandra servers. For example:: +List of ``host`` Cassandra servers. This must be provided when connecting to +a Cassandra cluster. Passing this setting is strictly exclusive +to :setting:`cassandra_secure_bundle_path`. Example:: cassandra_servers = ['localhost'] +.. setting:: cassandra_secure_bundle_path + +``cassandra_secure_bundle_path`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Default: None. + +Absolute path to the secure-connect-bundle zip file to connect +to an Astra DB instance. Passing this setting is strictly exclusive +to :setting:`cassandra_servers`. 
+Example:: + + cassandra_secure_bundle_path = '/home/user/bundles/secure-connect.zip' + +When connecting to Astra DB, it is necessary to specify +the plain-text auth provider and the associated username and password, +which take the value of the Client ID and the Client Secret, respectively, +of a valid token generated for the Astra DB instance. +See below for an Astra DB configuration example. + .. setting:: cassandra_port ``cassandra_port`` @@ -1359,7 +1387,7 @@ Port to contact the Cassandra servers on. Default: None. -The key-space in which to store the results. For example:: +The keyspace in which to store the results. For example:: cassandra_keyspace = 'tasks_keyspace' @@ -1446,18 +1474,85 @@ Named arguments to pass into the ``cassandra.cluster`` class. 'protocol_version': 3 } -Example configuration -~~~~~~~~~~~~~~~~~~~~~ +Example configuration (Cassandra) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. code-block:: python + result_backend = 'cassandra://' cassandra_servers = ['localhost'] cassandra_keyspace = 'celery' cassandra_table = 'tasks' - cassandra_read_consistency = 'ONE' - cassandra_write_consistency = 'ONE' + cassandra_read_consistency = 'QUORUM' + cassandra_write_consistency = 'QUORUM' cassandra_entry_ttl = 86400 +Example configuration (Astra DB) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +.. code-block:: python + + result_backend = 'cassandra://' + cassandra_keyspace = 'celery' + cassandra_table = 'tasks' + cassandra_read_consistency = 'QUORUM' + cassandra_write_consistency = 'QUORUM' + cassandra_auth_provider = 'PlainTextAuthProvider' + cassandra_auth_kwargs = { + 'username': '<>', + 'password': '<>' + } + cassandra_secure_bundle_path = '/path/to/secure-connect-bundle.zip' + cassandra_entry_ttl = 86400 + +Additional configuration +~~~~~~~~~~~~~~~~~~~~~~~~ + +The Cassandra driver, when estabilishing the connection, undergoes a stage +of negotiating the protocol version with the server(s). Similarly, +a load-balancing policy is automatically supplied (by default +``DCAwareRoundRobinPolicy``, which in turn has a ``local_dc`` setting, also +determined by the driver upon connection). +When possible, one should explicitly provide these in the configuration: +moreover, future versions of the Cassandra driver will require at least the +load-balancing policy to be specified (using `execution profiles `_, +as shown below). + +A full configuration for the Cassandra backend would thus have the +following additional lines: + +.. code-block:: python + + from cassandra.policies import DCAwareRoundRobinPolicy + from cassandra.cluster import ExecutionProfile + from cassandra.cluster import EXEC_PROFILE_DEFAULT + myEProfile = ExecutionProfile( + load_balancing_policy=DCAwareRoundRobinPolicy( + local_dc='datacenter1', # replace with your DC name + ) + ) + cassandra_options = { + 'protocol_version': 5, # for Cassandra 4, change if needed + 'execution_profiles': {EXEC_PROFILE_DEFAULT: myEProfile}, + } + +And similarly for Astra DB: + +.. code-block:: python + + from cassandra.policies import DCAwareRoundRobinPolicy + from cassandra.cluster import ExecutionProfile + from cassandra.cluster import EXEC_PROFILE_DEFAULT + myEProfile = ExecutionProfile( + load_balancing_policy=DCAwareRoundRobinPolicy( + local_dc='europe-west1', # for Astra DB, region name = dc name + ) + ) + cassandra_options = { + 'protocol_version': 4, # for Astra DB + 'execution_profiles': {EXEC_PROFILE_DEFAULT: myEProfile}, + } + .. 
_conf-s3-result-backend: S3 backend settings diff --git a/requirements/extras/cassandra.txt b/requirements/extras/cassandra.txt index 65465cbc1fa..b84a7360ace 100644 --- a/requirements/extras/cassandra.txt +++ b/requirements/extras/cassandra.txt @@ -1 +1 @@ -cassandra-driver<3.21.0 +cassandra-driver>=3.24.0,<4 diff --git a/t/unit/backends/test_cassandra.py b/t/unit/backends/test_cassandra.py index 5df53a1e576..75d8818bcd1 100644 --- a/t/unit/backends/test_cassandra.py +++ b/t/unit/backends/test_cassandra.py @@ -53,13 +53,50 @@ def test_init_with_and_without_LOCAL_QUROM(self, module): cons.LOCAL_FOO = 'bar' mod.CassandraBackend(app=self.app) - # no servers raises ImproperlyConfigured + # no servers and no bundle_path raises ImproperlyConfigured with pytest.raises(ImproperlyConfigured): self.app.conf.cassandra_servers = None + self.app.conf.cassandra_secure_bundle_path = None mod.CassandraBackend( app=self.app, keyspace='b', column_family='c', ) + # both servers no bundle_path raises ImproperlyConfigured + with pytest.raises(ImproperlyConfigured): + self.app.conf.cassandra_servers = ['localhost'] + self.app.conf.cassandra_secure_bundle_path = ( + '/home/user/secure-connect-bundle.zip') + mod.CassandraBackend( + app=self.app, keyspace='b', column_family='c', + ) + + def test_init_with_cloud(self): + # Tests behavior when Cluster.connect works properly + # and cluster is created with 'cloud' param instead of 'contact_points' + from celery.backends import cassandra as mod + + class DummyClusterWithBundle: + + def __init__(self, *args, **kwargs): + if args != (): + # this cluster is supposed to be created with 'cloud=...' + raise ValueError('I should be created with kwargs only') + pass + + def connect(self, *args, **kwargs): + return Mock() + + mod.cassandra = Mock() + mod.cassandra.cluster = Mock() + mod.cassandra.cluster.Cluster = DummyClusterWithBundle + + self.app.conf.cassandra_secure_bundle_path = '/path/to/bundle.zip' + self.app.conf.cassandra_servers = None + + x = mod.CassandraBackend(app=self.app) + x._get_connection() + assert isinstance(x._cluster, DummyClusterWithBundle) + @pytest.mark.patched_module(*CASSANDRA_MODULES) @pytest.mark.usefixtures('depends_on_current_app') def test_reduce(self, module): From fe0e50e5cd864cdaa46f6f255ced0168aa8042a8 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Fri, 22 Apr 2022 16:43:43 +0600 Subject: [PATCH 0092/1051] update actions v3+ (#7477) --- .github/workflows/python-package.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index ff5ba06ed39..8851737e63d 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -38,9 +38,9 @@ jobs: if: startsWith(matrix.os, 'ubuntu-') run: | sudo apt update && sudo apt-get install -f libcurl4-openssl-dev libssl-dev gnutls-dev httping expect libmemcached-dev - - uses: actions/checkout@v2.4.0 + - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2.2.2 + uses: actions/setup-python@v3 with: python-version: ${{ matrix.python-version }} @@ -49,7 +49,7 @@ jobs: run: | echo "::set-output name=dir::$(pip cache dir)" - name: Cache - uses: actions/cache@v2.1.6 + uses: actions/cache@v3.0.2 with: path: ${{ steps.pip-cache.outputs.dir }} key: @@ -66,7 +66,7 @@ jobs: run: | tox --verbose --verbose - - uses: codecov/codecov-action@v2.1.0 + - uses: codecov/codecov-action@v3.1.0 with: flags: unittests # optional 
fail_ci_if_error: true # optional (default = false) @@ -105,9 +105,9 @@ jobs: - name: Install apt packages run: | sudo apt-get install -f libcurl4-openssl-dev libssl-dev gnutls-dev httping expect libmemcached-dev - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: ${{ matrix.python-version }} From 12a4afa2116547a1abc8dc2c12f9f64a65b943f9 Mon Sep 17 00:00:00 2001 From: Simon Mazenoux Date: Fri, 22 Apr 2022 14:56:39 +0200 Subject: [PATCH 0093/1051] Fix test with missing .get() (#7479) --- docs/userguide/testing.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/userguide/testing.rst b/docs/userguide/testing.rst index 3f2f15ba680..a938aec70ca 100644 --- a/docs/userguide/testing.rst +++ b/docs/userguide/testing.rst @@ -346,7 +346,7 @@ Example: # Do this in your tests. def test_add_task(celery_session_worker): - assert add.delay(2, 2) == 4 + assert add.delay(2, 2).get() == 4 .. warning:: From 969e36a8d6823dff88fce2669cfcb59de7275a3d Mon Sep 17 00:00:00 2001 From: "pyup.io bot" Date: Sat, 23 Apr 2022 03:18:48 -0700 Subject: [PATCH 0094/1051] Config file for pyup.io (#7142) * create pyup.io config file * update Co-authored-by: Asif Saif Uddin --- .pyup.yml | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 .pyup.yml diff --git a/.pyup.yml b/.pyup.yml new file mode 100644 index 00000000000..0218aef3410 --- /dev/null +++ b/.pyup.yml @@ -0,0 +1,5 @@ +# autogenerated pyup.io config file +# see https://pyup.io/docs/configuration/ for all available options + +schedule: "every week" +update: all From ab2bcc096a9013a9147a3be1a2699d2312f93d1f Mon Sep 17 00:00:00 2001 From: code-review-doctor Date: Sun, 24 Apr 2022 01:16:42 +0100 Subject: [PATCH 0095/1051] Fix issue probably-meant-fstring found at https://codereview.doctor --- celery/backends/dynamodb.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/celery/backends/dynamodb.py b/celery/backends/dynamodb.py index 4fbd9aaf7d7..7c2f1ca5b39 100644 --- a/celery/backends/dynamodb.py +++ b/celery/backends/dynamodb.py @@ -128,7 +128,7 @@ def __init__(self, url=None, table_name=None, *args, **kwargs): self.time_to_live_seconds = int(ttl) except ValueError as e: logger.error( - 'TTL must be a number; got "{ttl}"', + f'TTL must be a number; got "{ttl}"', exc_info=e ) raise e From 4fe9d8be3c646866ea2c35c255b47240d1f99698 Mon Sep 17 00:00:00 2001 From: Antoine van der Horst Date: Tue, 26 Apr 2022 11:22:25 +0200 Subject: [PATCH 0096/1051] Fix eventlet example from not running. --- examples/eventlet/README.rst | 6 ++---- examples/eventlet/celeryconfig.py | 1 - examples/eventlet/tasks.py | 5 +++-- examples/eventlet/webcrawler.py | 6 +++--- 4 files changed, 8 insertions(+), 10 deletions(-) diff --git a/examples/eventlet/README.rst b/examples/eventlet/README.rst index 84a1856f314..a16f48e65cf 100644 --- a/examples/eventlet/README.rst +++ b/examples/eventlet/README.rst @@ -10,9 +10,7 @@ This is a Celery application containing two example tasks. 
First you need to install Eventlet, and also recommended is the `dnspython` module (when this is installed all name lookups will be asynchronous):: - $ pip install eventlet - $ pip install dnspython - $ pip install requests + $ python -m pip install eventlet celery pybloom-live Before you run any of the example tasks you need to start the worker:: @@ -34,7 +32,7 @@ of the response body:: $ cd examples/eventlet $ python >>> from tasks import urlopen - >>> urlopen.delay('http://www.google.com/').get() + >>> urlopen.delay('https://www.google.com/').get() 9980 To open several URLs at once you can do:: diff --git a/examples/eventlet/celeryconfig.py b/examples/eventlet/celeryconfig.py index f63b7b1fb5b..88250114199 100644 --- a/examples/eventlet/celeryconfig.py +++ b/examples/eventlet/celeryconfig.py @@ -9,7 +9,6 @@ broker_url = 'amqp://guest:guest@localhost:5672//' worker_disable_rate_limits = True -result_backend = 'amqp' result_expires = 30 * 60 imports = ('tasks', 'webcrawler') diff --git a/examples/eventlet/tasks.py b/examples/eventlet/tasks.py index 0bb339bb31f..c20570d768e 100644 --- a/examples/eventlet/tasks.py +++ b/examples/eventlet/tasks.py @@ -1,13 +1,14 @@ import requests -from celery import task +from celery import shared_task -@task() +@shared_task() def urlopen(url): print(f'-open: {url}') try: response = requests.get(url) except requests.exceptions.RequestException as exc: print(f'-url {url} gave error: {exc!r}') + return return len(response.text) diff --git a/examples/eventlet/webcrawler.py b/examples/eventlet/webcrawler.py index 617e9187567..f95934e896b 100644 --- a/examples/eventlet/webcrawler.py +++ b/examples/eventlet/webcrawler.py @@ -24,9 +24,9 @@ import requests from eventlet import Timeout -from pybloom import BloomFilter +from pybloom_live import BloomFilter -from celery import group, task +from celery import group, shared_task try: from urllib.parse import urlsplit @@ -43,7 +43,7 @@ def domain(url): return urlsplit(url)[1].split(':')[0] -@task(ignore_result=True, serializer='pickle', compression='zlib') +@shared_task(ignore_result=True, serializer='pickle', compression='zlib') def crawl(url, seen=None): print(f'crawling: {url}') if not seen: From cd9fd692fe755b3dc624f72f6220606cf11cca5f Mon Sep 17 00:00:00 2001 From: Einatle1 <103060084+Einatle1@users.noreply.github.com> Date: Wed, 27 Apr 2022 14:26:22 +0300 Subject: [PATCH 0097/1051] Update issue templates --- .github/ISSUE_TEMPLATE/Bug-Report.md | 6 ++- .../Documentation-Bug-Report.md | 6 ++- .github/ISSUE_TEMPLATE/Enhancement.md | 6 ++- .github/ISSUE_TEMPLATE/Feature-Request.md | 6 ++- .../Major-Version-Release-Checklist.md | 4 ++ .../Minor-Version-Release-Checklist.md | 4 ++ .github/ISSUE_TEMPLATE/bug_report.md | 38 +++++++++++++++++++ 7 files changed, 66 insertions(+), 4 deletions(-) create mode 100644 .github/ISSUE_TEMPLATE/bug_report.md diff --git a/.github/ISSUE_TEMPLATE/Bug-Report.md b/.github/ISSUE_TEMPLATE/Bug-Report.md index 71f46e30d69..bdf95bffb5e 100644 --- a/.github/ISSUE_TEMPLATE/Bug-Report.md +++ b/.github/ISSUE_TEMPLATE/Bug-Report.md @@ -1,8 +1,12 @@ --- name: Bug Report about: Is something wrong with Celery? 
-labels: "Issue Type: Bug Report" +title: '' +labels: 'Issue Type: Bug Report' +assignees: '' + --- + diff --git a/.github/ISSUE_TEMPLATE/Minor-Version-Release-Checklist.md b/.github/ISSUE_TEMPLATE/Minor-Version-Release-Checklist.md index c3656043b93..f6717b485c7 100644 --- a/.github/ISSUE_TEMPLATE/Minor-Version-Release-Checklist.md +++ b/.github/ISSUE_TEMPLATE/Minor-Version-Release-Checklist.md @@ -1,6 +1,10 @@ --- name: Minor Version Release Checklist about: About to release a new minor version? (Maintainers Only!) +title: '' +labels: '' +assignees: '' + --- Version: diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 00000000000..dd84ea7824f --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,38 @@ +--- +name: Bug report +about: Create a report to help us improve +title: '' +labels: '' +assignees: '' + +--- + +**Describe the bug** +A clear and concise description of what the bug is. + +**To Reproduce** +Steps to reproduce the behavior: +1. Go to '...' +2. Click on '....' +3. Scroll down to '....' +4. See error + +**Expected behavior** +A clear and concise description of what you expected to happen. + +**Screenshots** +If applicable, add screenshots to help explain your problem. + +**Desktop (please complete the following information):** + - OS: [e.g. iOS] + - Browser [e.g. chrome, safari] + - Version [e.g. 22] + +**Smartphone (please complete the following information):** + - Device: [e.g. iPhone6] + - OS: [e.g. iOS8.1] + - Browser [e.g. stock browser, safari] + - Version [e.g. 22] + +**Additional context** +Add any other context about the problem here. From 5d58627ec3146fb45e5667c05c11154731e72792 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Thu, 28 Apr 2022 12:44:32 +0600 Subject: [PATCH 0098/1051] azure-storage-blob>=12.11.0 --- requirements/extras/azureblockblob.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/azureblockblob.txt b/requirements/extras/azureblockblob.txt index a9208b97325..a0088f759cb 100644 --- a/requirements/extras/azureblockblob.txt +++ b/requirements/extras/azureblockblob.txt @@ -1 +1 @@ -azure-storage-blob==12.9.0 +azure-storage-blob>=12.11.0 From 06141bd524b23f402417af64415f6c8d94aad789 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20BRESSY?= Date: Fri, 29 Apr 2022 12:08:33 +0200 Subject: [PATCH 0099/1051] Update old link to new website https://docs.celeryq.dev/en/latest/userguide/tasks.html\ #avoid-launching-synchronous-subtasks --- celery/result.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/celery/result.py b/celery/result.py index 2f468fc60cb..ecbe17cb569 100644 --- a/celery/result.py +++ b/celery/result.py @@ -28,8 +28,8 @@ E_WOULDBLOCK = """\ Never call result.get() within a task! 
-See http://docs.celeryq.org/en/latest/userguide/tasks.html\ -#task-synchronous-subtasks +See https://docs.celeryq.dev/en/latest/userguide/tasks.html\ +#avoid-launching-synchronous-subtasks """ From 4ef92cf0d439d82b4f8ac66bda543e09a2724af7 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Sun, 1 May 2022 15:08:23 +0600 Subject: [PATCH 0100/1051] modify libs and os (#7504) * modify libs and os * increase timeout mins * sudo apt update && --- .github/workflows/python-package.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 8851737e63d..4d2da0ecd66 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false matrix: python-version: ['3.7', '3.8', '3.9', '3.10', 'pypy-3.7', 'pypy-3.8'] - os: ["ubuntu-20.04", "windows-latest"] + os: ["ubuntu-latest", "windows-latest"] exclude: - python-version: 'pypy-3.7' os: "windows-latest" @@ -37,7 +37,7 @@ jobs: - name: Install apt packages if: startsWith(matrix.os, 'ubuntu-') run: | - sudo apt update && sudo apt-get install -f libcurl4-openssl-dev libssl-dev gnutls-dev httping expect libmemcached-dev + sudo apt update && sudo apt-get install -f libcurl4-openssl-dev libssl-dev libgnutls28-dev httping expect libmemcached-dev - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v3 @@ -62,7 +62,7 @@ jobs: - name: > Run tox for "${{ matrix.python-version }}-unit" - timeout-minutes: 25 + timeout-minutes: 30 run: | tox --verbose --verbose @@ -78,7 +78,7 @@ jobs: if: needs.Unit.result == 'success' timeout-minutes: 240 - runs-on: ubuntu-20.04 + runs-on: ubuntu-latest strategy: fail-fast: false matrix: @@ -104,7 +104,7 @@ jobs: steps: - name: Install apt packages run: | - sudo apt-get install -f libcurl4-openssl-dev libssl-dev gnutls-dev httping expect libmemcached-dev + sudo apt update && sudo apt-get install -f libcurl4-openssl-dev libssl-dev libgnutls28-dev httping expect libmemcached-dev - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v3 @@ -120,7 +120,7 @@ jobs: - name: > Run tox for "${{ matrix.python-version }}-integration-${{ matrix.toxenv }}" - timeout-minutes: 50 + timeout-minutes: 60 run: > tox --verbose --verbose -e "${{ matrix.python-version }}-integration-${{ matrix.toxenv }}" -vv From 128f0027005f2bf9d4b93082049d2c96c2bcd879 Mon Sep 17 00:00:00 2001 From: Yonathan Randolph Date: Sat, 30 Apr 2022 21:27:25 -0700 Subject: [PATCH 0101/1051] Make start_worker, setup_default_app reusable outside of pytest start_worker and setup_default_app are generator functions wrapped in @contextmanager. Generally, @contextmanager requires the yield statement to be wrapped in a try-finally statement to guarantee cleanup. This is not an issue if these functions are only called from @pytest.fixture, which never passes exceptions to the generator. But to use these context managers outside of @pytest.fixture, they need to use the more general try-finally pattern so that they do not hang. 
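A generic sketch of the pattern this patch adopts (plain Python, not the
Celery code itself); without the try/finally, an exception thrown into the
generator would skip the cleanup entirely:

```
from contextlib import contextmanager

@contextmanager
def managed():
    resource = object()  # stand-in for worker/app setup
    try:
        yield resource
    finally:
        # Runs even when the with-block raises, which is what callers
        # outside @pytest.fixture rely on for teardown.
        print('cleanup ran')

try:
    with managed():
        raise RuntimeError('body failed')
except RuntimeError:
    pass  # the cleanup above still executed
```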
--- celery/contrib/testing/app.py | 29 ++++++++------- celery/contrib/testing/worker.py | 63 +++++++++++++++++--------------- t/unit/contrib/test_worker.py | 56 ++++++++++++++++++++++++++++ 3 files changed, 106 insertions(+), 42 deletions(-) create mode 100644 t/unit/contrib/test_worker.py diff --git a/celery/contrib/testing/app.py b/celery/contrib/testing/app.py index 274e5d12e0b..95ed700b8ec 100644 --- a/celery/contrib/testing/app.py +++ b/celery/contrib/testing/app.py @@ -80,8 +80,10 @@ class NonTLS: current_app = trap _state._tls = NonTLS() - yield - _state._tls = prev_tls + try: + yield + finally: + _state._tls = prev_tls @contextmanager @@ -95,15 +97,16 @@ def setup_default_app(app, use_trap=False): prev_finalizers = set(_state._on_app_finalizers) prev_apps = weakref.WeakSet(_state._apps) - if use_trap: - with set_trap(app): + try: + if use_trap: + with set_trap(app): + yield + else: yield - else: - yield - - _state.set_default_app(prev_default_app) - _state._tls.current_app = prev_current_app - if app is not prev_current_app: - app.close() - _state._on_app_finalizers = prev_finalizers - _state._apps = prev_apps + finally: + _state.set_default_app(prev_default_app) + _state._tls.current_app = prev_current_app + if app is not prev_current_app: + app.close() + _state._on_app_finalizers = prev_finalizers + _state._apps = prev_apps diff --git a/celery/contrib/testing/worker.py b/celery/contrib/testing/worker.py index 8467f85f3b4..c72dc0e4006 100644 --- a/celery/contrib/testing/worker.py +++ b/celery/contrib/testing/worker.py @@ -72,21 +72,23 @@ def start_worker( """ test_worker_starting.send(sender=app) - with _start_worker_thread(app, - concurrency=concurrency, - pool=pool, - loglevel=loglevel, - logfile=logfile, - perform_ping_check=perform_ping_check, - shutdown_timeout=shutdown_timeout, - **kwargs) as worker: - if perform_ping_check: - from .tasks import ping - with allow_join_result(): - assert ping.delay().get(timeout=ping_task_timeout) == 'pong' - - yield worker - test_worker_stopped.send(sender=app, worker=worker) + try: + with _start_worker_thread(app, + concurrency=concurrency, + pool=pool, + loglevel=loglevel, + logfile=logfile, + perform_ping_check=perform_ping_check, + shutdown_timeout=shutdown_timeout, + **kwargs) as worker: + if perform_ping_check: + from .tasks import ping + with allow_join_result(): + assert ping.delay().get(timeout=ping_task_timeout) == 'pong' + + yield worker + finally: + test_worker_stopped.send(sender=app, worker=worker) @contextmanager @@ -131,18 +133,19 @@ def _start_worker_thread(app, worker.ensure_started() _set_task_join_will_block(False) - yield worker - - from celery.worker import state - state.should_terminate = 0 - t.join(shutdown_timeout) - if t.is_alive(): - raise RuntimeError( - "Worker thread failed to exit within the allocated timeout. " - "Consider raising `shutdown_timeout` if your tasks take longer " - "to execute." - ) - state.should_terminate = None + try: + yield worker + finally: + from celery.worker import state + state.should_terminate = 0 + t.join(shutdown_timeout) + if t.is_alive(): + raise RuntimeError( + "Worker thread failed to exit within the allocated timeout. " + "Consider raising `shutdown_timeout` if your tasks take longer " + "to execute." 
+            )
+        state.should_terminate = None


 @contextmanager
@@ -163,8 +166,10 @@ def _start_worker_process(app,
     app.set_current()
     cluster = Cluster([Node('testworker1@%h')])
     cluster.start()
-    yield
-    cluster.stopwait()
+    try:
+        yield
+    finally:
+        cluster.stopwait()


 def setup_app_for_worker(app, loglevel, logfile) -> None:
diff --git a/t/unit/contrib/test_worker.py b/t/unit/contrib/test_worker.py
new file mode 100644
index 00000000000..ad4efdb5529
--- /dev/null
+++ b/t/unit/contrib/test_worker.py
@@ -0,0 +1,56 @@
+import pytest
+
+from celery import Celery
+from celery.contrib.testing.worker import start_worker
+
+app = Celery('celerytest',
+             backend='cache+memory://',
+             broker='memory://',
+             )
+
+
+@app.task
+def add(x, y):
+    return x + y
+
+
+def test_start_worker():
+    app.config_from_object({
+        'worker_hijack_root_logger': False,
+    })
+    # this import adds a @shared_task, which uses connect_on_app_finalize
+    # to install the celery.ping task that the test lib uses
+    import celery.contrib.testing.tasks  # noqa: F401
+
+    # to avoid changing the root logger level to ERROR,
+    # we have to set both app.log.loglevel and the start_worker loglevel arg to 0
+    # (see celery.app.log.setup_logging_subsystem)
+    app.log.loglevel = 0
+    with start_worker(app=app, loglevel=0):
+        result = add.s(1, 2).apply_async()
+        val = result.get(timeout=5)
+        assert val == 3
+
+
+@app.task
+def error_task():
+    raise NotImplementedError()
+
+
+def test_start_worker_with_exception():
+    """Make sure that start_worker does not hang on exception"""
+    app.config_from_object({
+        'worker_hijack_root_logger': False,
+    })
+    # this import adds a @shared_task, which uses connect_on_app_finalize
+    # to install the celery.ping task that the test lib uses
+    import celery.contrib.testing.tasks  # noqa: F401
+
+    # to avoid changing the root logger level to ERROR,
+    # we have to set both app.log.loglevel and the start_worker loglevel arg to 0
+    # (see celery.app.log.setup_logging_subsystem)
+    app.log.loglevel = 0
+    with pytest.raises(NotImplementedError):
+        with start_worker(app=app, loglevel=0):
+            result = error_task.apply_async()
+            result.get(timeout=5)
From ecda164228fe60cea841c536f89ee99dc4e1dcde Mon Sep 17 00:00:00 2001
From: Einatle1 <103060084+Einatle1@users.noreply.github.com>
Date: Tue, 3 May 2022 19:24:26 +0300
Subject: [PATCH 0102/1051] Create Issueform.yaml

---
 Issueform.yaml | 15 +++++++++++++++
 1 file changed, 15 insertions(+)
 create mode 100644 Issueform.yaml

diff --git a/Issueform.yaml b/Issueform.yaml
new file mode 100644
index 00000000000..a8ffc2b8239
--- /dev/null
+++ b/Issueform.yaml
@@ -0,0 +1,15 @@
+name: Bug report
+description: test
+body:
+- type: dropdown
+  id: download
+  attributes:
+    label: How did you download the software?
+ options: + - Homebrew + - MacPorts + - apt-get + - Built from source + validations: + required: true + From 7d68f16a38d80aa9374e00eb130b13df4a6b3f18 Mon Sep 17 00:00:00 2001 From: Omer Katz Date: Tue, 3 May 2022 19:26:02 +0300 Subject: [PATCH 0103/1051] Delete bug_report.md --- .github/ISSUE_TEMPLATE/bug_report.md | 38 ---------------------------- 1 file changed, 38 deletions(-) delete mode 100644 .github/ISSUE_TEMPLATE/bug_report.md diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md deleted file mode 100644 index dd84ea7824f..00000000000 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ /dev/null @@ -1,38 +0,0 @@ ---- -name: Bug report -about: Create a report to help us improve -title: '' -labels: '' -assignees: '' - ---- - -**Describe the bug** -A clear and concise description of what the bug is. - -**To Reproduce** -Steps to reproduce the behavior: -1. Go to '...' -2. Click on '....' -3. Scroll down to '....' -4. See error - -**Expected behavior** -A clear and concise description of what you expected to happen. - -**Screenshots** -If applicable, add screenshots to help explain your problem. - -**Desktop (please complete the following information):** - - OS: [e.g. iOS] - - Browser [e.g. chrome, safari] - - Version [e.g. 22] - -**Smartphone (please complete the following information):** - - Device: [e.g. iPhone6] - - OS: [e.g. iOS8.1] - - Browser [e.g. stock browser, safari] - - Version [e.g. 22] - -**Additional context** -Add any other context about the problem here. From de711909691b73529883f2b7b8cbdbcdc7cbffc7 Mon Sep 17 00:00:00 2001 From: Omer Katz Date: Tue, 3 May 2022 19:27:29 +0300 Subject: [PATCH 0104/1051] Rename Issueform.yaml to .github/ISSUE_TEMPLATE/Issueform.yaml --- Issueform.yaml => .github/ISSUE_TEMPLATE/Issueform.yaml | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename Issueform.yaml => .github/ISSUE_TEMPLATE/Issueform.yaml (100%) diff --git a/Issueform.yaml b/.github/ISSUE_TEMPLATE/Issueform.yaml similarity index 100% rename from Issueform.yaml rename to .github/ISSUE_TEMPLATE/Issueform.yaml From 53d79425725dd869f37fe652f26813e1eca26af6 Mon Sep 17 00:00:00 2001 From: Omer Katz Date: Tue, 3 May 2022 19:28:14 +0300 Subject: [PATCH 0105/1051] Delete Issueform.yaml --- .github/ISSUE_TEMPLATE/Issueform.yaml | 15 --------------- 1 file changed, 15 deletions(-) delete mode 100644 .github/ISSUE_TEMPLATE/Issueform.yaml diff --git a/.github/ISSUE_TEMPLATE/Issueform.yaml b/.github/ISSUE_TEMPLATE/Issueform.yaml deleted file mode 100644 index a8ffc2b8239..00000000000 --- a/.github/ISSUE_TEMPLATE/Issueform.yaml +++ /dev/null @@ -1,15 +0,0 @@ -name: Bug report -description: test -body: -- type: dropdown - id: download - attributes: - label: How did you download the software? - options: - - Homebrew - - MacPorts - - apt-get - - Built from source - validations: - required: true - From 850ffbd71a65c370616164f5684b53b59e58fbf0 Mon Sep 17 00:00:00 2001 From: Tim Tisdall Date: Fri, 6 May 2022 09:37:50 -0400 Subject: [PATCH 0106/1051] fix undefined variable in retry example code --- docs/userguide/tasks.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/userguide/tasks.rst b/docs/userguide/tasks.rst index a1c7eae9603..3712b16b7b8 100644 --- a/docs/userguide/tasks.rst +++ b/docs/userguide/tasks.rst @@ -704,7 +704,7 @@ in a :keyword:`try` ... 
:keyword:`except` statement:

     try:
         twitter.refresh_timeline(user)
     except FailWhaleError as exc:
-        raise div.retry(exc=exc, max_retries=5)
+        raise refresh_timeline.retry(exc=exc, max_retries=5)

 If you want to automatically retry on any error, simply use:

From f1073e6682ddf6c1d88bb3c0d5a456124b43bf7c Mon Sep 17 00:00:00 2001
From: Tim Tisdall
Date: Thu, 7 Apr 2022 08:38:27 -0400
Subject: [PATCH 0107/1051] revert celery#5941 so note below makes sense again

---
 celery/app/task.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/celery/app/task.py b/celery/app/task.py
index db47ab202f6..f5a653e278a 100644
--- a/celery/app/task.py
+++ b/celery/app/task.py
@@ -639,7 +639,7 @@ def retry(self, args=None, kwargs=None, exc=None, throw=True,
             ...     twitter.post_status_update(message)
             ... except twitter.FailWhale as exc:
             ...     # Retry in 5 minutes.
-            ...     self.retry(countdown=60 * 5, exc=exc)
+            ...     raise self.retry(countdown=60 * 5, exc=exc)

         Note:
             Although the task will never return above as `retry` raises an

From 2fda4e50c458aeee9d007d21a53261828549460a Mon Sep 17 00:00:00 2001
From: dobosevych
Date: Mon, 9 May 2022 19:28:42 +0300
Subject: [PATCH 0108/1051] Fix incompatibility with new couchbase version
 (#7518)

* Fix incompatibility with new couchbase version

* Fixed flake8
---
 celery/backends/couchbase.py | 12 ++++++++++--
 1 file changed, 10 insertions(+), 2 deletions(-)

diff --git a/celery/backends/couchbase.py b/celery/backends/couchbase.py
index 9ed594c4826..25f729f1961 100644
--- a/celery/backends/couchbase.py
+++ b/celery/backends/couchbase.py
@@ -9,10 +9,14 @@
 try:
     from couchbase.auth import PasswordAuthenticator
     from couchbase.cluster import Cluster, ClusterOptions
-    from couchbase_core._libcouchbase import FMT_AUTO
 except ImportError:
     Cluster = PasswordAuthenticator = ClusterOptions = None

+try:
+    from couchbase_core._libcouchbase import FMT_AUTO
+except ImportError:
+    FMT_AUTO = None
+
 __all__ = ('CouchbaseBackend',)

@@ -97,7 +101,11 @@ def get(self, key):
         return self.connection.get(key).content

     def set(self, key, value):
-        self.connection.upsert(key, value, ttl=self.expires, format=FMT_AUTO)
+        # Since couchbase 4.0.0, value is JSONType, so the format parameter isn't needed
+        if FMT_AUTO is not None:
+            self.connection.upsert(key, value, ttl=self.expires, format=FMT_AUTO)
+        else:
+            self.connection.upsert(key, value, ttl=self.expires)

     def mget(self, keys):
         return self.connection.get_multi(keys)

From b9d949aa9e121140b56df8d11c8c65a7df70a0e2 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Mon, 9 May 2022 16:47:52 +0000
Subject: [PATCH 0109/1051] [pre-commit.ci] pre-commit autoupdate
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

updates:
- [github.com/asottile/pyupgrade: v2.32.0 → v2.32.1](https://github.com/asottile/pyupgrade/compare/v2.32.0...v2.32.1)
---
 .pre-commit-config.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index f667ad0f237..f70b1e4d643 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/asottile/pyupgrade
-    rev: v2.32.0
+    rev: v2.32.1
     hooks:
       - id: pyupgrade
         args: ["--py37-plus"]

From 57fa506da99c1f6d370b420261aaf70fcae8d8fa Mon Sep 17 00:00:00 2001
From: ymorgenstern <101975191+ymorgenstern@users.noreply.github.com>
Date: Thu, 12 May 2022 14:15:01 +0300
Subject: [PATCH 0110/1051] docs: Linking a task to a group does *not*
guarantee all group tasks will finish first

The current docs do not make this clear, and this behavior does not always
manifest, so it can be very surprising when it does.
---
 docs/userguide/canvas.rst | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/docs/userguide/canvas.rst b/docs/userguide/canvas.rst
index 81d9922e518..6f626ab56f4 100644
--- a/docs/userguide/canvas.rst
+++ b/docs/userguide/canvas.rst
@@ -698,6 +698,8 @@ the behaviour can be somewhat surprising due to the fact that groups are not
 real tasks and simply pass linked tasks down to their encapsulated
 signatures. This means that the return values of a group are not collected
 to be passed to a linked callback signature.
+Additionally, linking the task will *not* guarantee that it will activate only
+when all group tasks have finished.
 As an example, the following snippet using a simple `add(a, b)` task is faulty
 since the linked `add.s()` signature will not receive the finalised group
 result as one might expect.

From 1ec6d230bc195c8e7cac4fe855ddf068559ce527 Mon Sep 17 00:00:00 2001
From: Troy Swanson
Date: Wed, 11 May 2022 15:50:11 -0500
Subject: [PATCH 0111/1051] Update error message to link to celeryq.dev

---
 celery/worker/consumer/consumer.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/celery/worker/consumer/consumer.py b/celery/worker/consumer/consumer.py
index c9b820e4966..f74326c506f 100644
--- a/celery/worker/consumer/consumer.py
+++ b/celery/worker/consumer/consumer.py
@@ -75,7 +75,7 @@
 Or maybe you're using relative imports?

 Please see
-http://docs.celeryq.org/en/latest/internals/protocol.html
+http://docs.celeryq.dev/en/latest/internals/protocol.html
 for more information.

 The full contents of the message body was:
@@ -95,7 +95,7 @@
 Please ensure your message conforms to the task
 message protocol as described here:
-http://docs.celeryq.org/en/latest/internals/protocol.html
+http://docs.celeryq.dev/en/latest/internals/protocol.html

 The full contents of the message body was:
 %s

From 726b664840b6a1fcea9225b254a393e665363ad0 Mon Sep 17 00:00:00 2001
From: Omer Katz
Date: Mon, 23 May 2022 09:44:40 +0300
Subject: [PATCH 0112/1051] Ensure a proper error message is raised when id
 for key is empty (#7447)

* Ensure a proper error message is raised when id for key is empty.

* Add test coverage.

Co-authored-by: Omer Katz
---
 celery/backends/base.py      | 22 +++++++++++++---------
 t/unit/backends/test_base.py | 12 ++++++++++++
 2 files changed, 25 insertions(+), 9 deletions(-)

diff --git a/celery/backends/base.py b/celery/backends/base.py
index 20e890c7be5..5f76191b136 100644
--- a/celery/backends/base.py
+++ b/celery/backends/base.py
@@ -846,23 +846,27 @@ def expire(self, key, value):

     def get_key_for_task(self, task_id, key=''):
         """Get the cache key for a task by id."""
-        key_t = self.key_t
-        return key_t('').join([
-            self.task_keyprefix, key_t(task_id), key_t(key),
-        ])
+        if not task_id:
+            raise ValueError(f'task_id must not be empty. Got {task_id} instead.')
+        return self._get_key_for(self.task_keyprefix, task_id, key)

     def get_key_for_group(self, group_id, key=''):
         """Get the cache key for a group by id."""
-        key_t = self.key_t
-        return key_t('').join([
-            self.group_keyprefix, key_t(group_id), key_t(key),
-        ])
+        if not group_id:
+            raise ValueError(f'group_id must not be empty.
Got {group_id} instead.')
+        return self._get_key_for(self.group_keyprefix, group_id, key)

     def get_key_for_chord(self, group_id, key=''):
         """Get the cache key for the chord waiting on group with given id."""
+        if not group_id:
+            raise ValueError(f'group_id must not be empty. Got {group_id} instead.')
+        return self._get_key_for(self.chord_keyprefix, group_id, key)
+
+    def _get_key_for(self, prefix, id, key=''):
         key_t = self.key_t
+
         return key_t('').join([
-            self.chord_keyprefix, key_t(group_id), key_t(key),
+            prefix, key_t(id), key_t(key),
         ])

     def _strip_prefix(self, key):
diff --git a/t/unit/backends/test_base.py b/t/unit/backends/test_base.py
index d65fdf2a41f..b9084522d25 100644
--- a/t/unit/backends/test_base.py
+++ b/t/unit/backends/test_base.py
@@ -705,6 +705,18 @@ def test_store_result_race_second_write_should_ignore_if_previous_success(self):
         stored_meta = self.b.decode(self.b.get(self.b.get_key_for_task(tid)))
         assert stored_meta['status'] == states.SUCCESS

+    def test_get_key_for_task_none_task_id(self):
+        with pytest.raises(ValueError):
+            self.b.get_key_for_task(None)
+
+    def test_get_key_for_group_none_group_id(self):
+        with pytest.raises(ValueError):
+            self.b.get_key_for_group(None)
+
+    def test_get_key_for_chord_none_group_id(self):
+        with pytest.raises(ValueError):
+            self.b.get_key_for_chord(None)
+
     def test_strip_prefix(self):
         x = self.b.get_key_for_task('x1b34')
         assert self.b._strip_prefix(x) == 'x1b34'

From 2da6d8053d13dcf244cb8d10f8dfbd22bd61c9b7 Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Tue, 24 May 2022 21:12:04 +0600
Subject: [PATCH 0113/1051] Update setup.py (#7534)

---
 setup.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setup.py b/setup.py
index e650ceff4db..b1876c5f501 100755
--- a/setup.py
+++ b/setup.py
@@ -163,7 +163,7 @@ def run_tests(self):
     license='BSD',
     platforms=['any'],
     install_requires=install_requires(),
-    python_requires=">=3.7,",
+    python_requires=">=3.7",
     tests_require=reqs('test.txt'),
     extras_require=extras_require(),
     cmdclass={'test': pytest},

From 35a91276ee79e35da849c1cbdf051c94d360e262 Mon Sep 17 00:00:00 2001
From: Omer Katz
Date: Wed, 25 May 2022 15:25:23 +0300
Subject: [PATCH 0114/1051] Updated the changelog.

---
 Changelog.rst | 23 +++++++++++++++++++++
 1 file changed, 23 insertions(+)

diff --git a/Changelog.rst b/Changelog.rst
index d38ffefb9cf..295ac9c751c 100644
--- a/Changelog.rst
+++ b/Changelog.rst
@@ -9,6 +9,27 @@ in the & 5.2.x series, please see :ref:`whatsnew-5.2` for
 an overview of what's new in Celery 5.2.

+.. _version-5.2.7:
+
+5.2.7
+=====
+
+:release-date: 2022-5-25 15:30 P.M UTC+2:00
+:release-by: Omer Katz
+
+- Fix packaging issue which causes poetry 1.2b1 and above to fail to install Celery (#7534).
+
+.. _version-5.2.6:
+
+5.2.6
+=====
+
+:release-date: 2022-4-04 21:15 P.M UTC+2:00
+:release-by: Omer Katz
+
+- load_extension_class_names - correct module_name (#7433).
+  This fixes a regression caused by #7218.
+
 .. _version-5.2.5:

 5.2.5
@@ -17,6 +38,8 @@ an overview of what's new in Celery 5.2.
 :release-date: 2022-4-03 20:42 P.M UTC+2:00
 :release-by: Omer Katz

+**This release was yanked due to a regression caused by the PR below**
+
 - Use importlib instead of deprecated pkg_resources (#7218).

 .. 
_version-5.2.4: From 691d305398c59966d7cad428e2afcceb67a52aab Mon Sep 17 00:00:00 2001 From: Omer Katz Date: Thu, 26 May 2022 12:07:06 +0300 Subject: [PATCH 0115/1051] =?UTF-8?q?Bump=20version:=205.2.6=20=E2=86=92?= =?UTF-8?q?=205.2.7?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.cfg | 4 ++-- Changelog.rst | 2 +- README.rst | 2 +- celery/__init__.py | 2 +- docs/includes/introduction.txt | 2 +- 5 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 9ff614747e0..85e1bf24d8e 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,9 +1,9 @@ [bumpversion] -current_version = 5.2.5 +current_version = 5.2.7 commit = True tag = True parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P[a-z\d]+)? -serialize = +serialize = {major}.{minor}.{patch}{releaselevel} {major}.{minor}.{patch} diff --git a/Changelog.rst b/Changelog.rst index 295ac9c751c..a88ec2c16a1 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -14,7 +14,7 @@ an overview of what's new in Celery 5.2. 5.2.7 ===== -:release-date: 2022-5-25 15:30 P.M UTC+2:00 +:release-date: 2022-5-26 12:15 P.M UTC+2:00 :release-by: Omer Katz - Fix packaging issue which causes poetry 1.2b1 and above to fail install Celery (#7534). diff --git a/README.rst b/README.rst index 7fbea1028fb..18e1425985b 100644 --- a/README.rst +++ b/README.rst @@ -2,7 +2,7 @@ |build-status| |coverage| |license| |wheel| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge| -:Version: 5.2.5 (dawn-chorus) +:Version: 5.2.7 (dawn-chorus) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ diff --git a/celery/__init__.py b/celery/__init__.py index 0ed9f730e5c..053e2eadd48 100644 --- a/celery/__init__.py +++ b/celery/__init__.py @@ -17,7 +17,7 @@ SERIES = 'dawn-chorus' -__version__ = '5.2.5' +__version__ = '5.2.7' __author__ = 'Ask Solem' __contact__ = 'auvipy@gmail.com' __homepage__ = 'https://docs.celeryq.dev/' diff --git a/docs/includes/introduction.txt b/docs/includes/introduction.txt index 45b32667563..b5f691a8e07 100644 --- a/docs/includes/introduction.txt +++ b/docs/includes/introduction.txt @@ -1,4 +1,4 @@ -:Version: 5.2.5 (dawn-chorus) +:Version: 5.2.7 (dawn-chorus) :Web: https://docs.celeryproject.org/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ From 10d6a2e201b4f7f9bb53945344c1f2aaf920097c Mon Sep 17 00:00:00 2001 From: Aktan-A Date: Tue, 31 May 2022 11:41:01 +0600 Subject: [PATCH 0116/1051] Add notes about ignore_result attribute regarding canvas --- docs/userguide/canvas.rst | 10 ++++++++-- docs/userguide/tasks.rst | 3 +++ 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/docs/userguide/canvas.rst b/docs/userguide/canvas.rst index 6f626ab56f4..c0dc9ae13c8 100644 --- a/docs/userguide/canvas.rst +++ b/docs/userguide/canvas.rst @@ -655,6 +655,12 @@ Groups .. versionadded:: 3.0 +.. note:: + + Similarly to chords, tasks used in a group must *not* ignore their results. + See ":ref:`chord-important-notes`" for more information. + + A group can be used to execute several tasks in parallel. The :class:`~celery.group` function takes a list of signatures: @@ -698,7 +704,7 @@ the behaviour can be somewhat surprising due to the fact that groups are not real tasks and simply pass linked tasks down to their encapsulated signatures. 
This means that the return values of a group are not collected to be passed
 to a linked callback signature.
-Additionally, linking the task will *not* guarantee that it will activate only 
+Additionally, linking the task will *not* guarantee that it will activate only
 when all group tasks have finished.
 As an example, the following snippet using a simple `add(a, b)` task is faulty
 since the linked `add.s()` signature will not receive the finalised group
@@ -814,7 +820,7 @@ Chords

     Tasks used within a chord must *not* ignore their results. If the result
     backend is disabled for *any* task (header or body) in your chord you
-    should read ":ref:`chord-important-notes`." Chords are not currently
+    should read ":ref:`chord-important-notes`". Chords are not currently
     supported with the RPC result backend.

diff --git a/docs/userguide/tasks.rst b/docs/userguide/tasks.rst
index 3712b16b7b8..f3ae3366125 100644
--- a/docs/userguide/tasks.rst
+++ b/docs/userguide/tasks.rst
@@ -913,6 +913,9 @@ General
     :class:`~celery.result.AsyncResult` to check if the task is ready,
     or get its return value.

+    Note: Certain features will not work if task results are disabled.
+    For more details check the Canvas documentation.
+
 .. attribute:: Task.store_errors_even_if_ignored

     If :const:`True`, errors will be stored even if the task is configured

From b4fe2d998f1cac138e9ded938e52dfeadfe00bb8 Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Wed, 1 Jun 2022 17:36:05 +0600
Subject: [PATCH 0117/1051] pytest-subtests==0.8.0

---
 requirements/test.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements/test.txt b/requirements/test.txt
index 179133446e5..bdeebe8c32f 100644
--- a/requirements/test.txt
+++ b/requirements/test.txt
@@ -1,6 +1,6 @@
 pytest~=7.1.1
 pytest-celery
-pytest-subtests==0.7.0
+pytest-subtests==0.8.0
 pytest-timeout~=2.1.0
 boto3>=1.9.178
 moto>=2.2.6

From f5fe8b136e02d4f9547ffe7861370d7de38f2ce1 Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Wed, 1 Jun 2022 21:42:45 +0600
Subject: [PATCH 0118/1051] redis>=4.2.2 (#7493)

---
 requirements/extras/redis.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements/extras/redis.txt b/requirements/extras/redis.txt
index a88793fe8a5..e51f0ec519b 100644
--- a/requirements/extras/redis.txt
+++ b/requirements/extras/redis.txt
@@ -1 +1 @@
-redis>=3.4.1,!=4.0.0,!=4.0.1
+redis>=4.2.2

From b0d6a3bc33c14b82451ffd6ebef2f9b403156ec4 Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Wed, 1 Jun 2022 21:43:25 +0600
Subject: [PATCH 0119/1051] cryptography~=37.0.1 (#7492)

---
 requirements/extras/auth.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements/extras/auth.txt b/requirements/extras/auth.txt
index 682fb872fcb..7973b0677a7 100644
--- a/requirements/extras/auth.txt
+++ b/requirements/extras/auth.txt
@@ -1 +1 @@
-cryptography==36.0.2
+cryptography~=37.0.1

From 7fd63f531f0406bdbf6abd3bb0e3714d854f72d2 Mon Sep 17 00:00:00 2001
From: Espoir Murhabazi
Date: Mon, 24 Jan 2022 18:21:58 +0200
Subject: [PATCH 0120/1051] Crontab string representation does not match UNIX
 crontab expression

This is similar to the issue linked below, which I am attempting to fix
across the celery repos. I will give more info later.
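
For example (an illustrative sketch, assuming the reordered CRON_REPR
template shown in the diff below), a schedule should render its fields in
UNIX crontab order: minute, hour, day-of-month, month, day-of-week:

    >>> from celery.schedules import crontab
    >>> crontab(minute='*/15')
    <crontab: */15 * * * * (m/h/dM/MY/d)>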
https://github.com/celery/django-celery-beat/issues/73
---
 celery/schedules.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/celery/schedules.py b/celery/schedules.py
index 8a2f3c9bc00..0daa8b67300 100644
--- a/celery/schedules.py
+++ b/celery/schedules.py
@@ -32,10 +32,11 @@
 """

 CRON_REPR = """\
-<crontab: {0._orig_minute} {0._orig_hour} {0._orig_day_of_week} \
-{0._orig_day_of_month} {0._orig_month_of_year} (m/h/d/dM/MY)>\
+<crontab: {0._orig_minute} {0._orig_hour} {0._orig_day_of_month} \
+{0._orig_month_of_year} {0._orig_day_of_week} (m/h/dM/MY/d)>\
 """

+
 SOLAR_INVALID_LATITUDE = """\
 Argument latitude {lat} is invalid, must be between -90 and 90.\
 """

From 617a757c7b5d99c811713867013818827f46a4d0 Mon Sep 17 00:00:00 2001
From: Tom Parker-Shemilt
Date: Sun, 5 Jun 2022 19:12:08 +0100
Subject: [PATCH 0121/1051] Worker should exit with ctx.exit to get the right
 exitcode for non-zero cases (#7544)

* Worker should exit with ctx.exit to get the right exitcode for non-zero cases

* Add fast-fail coverage to worker

* Add unit test for celery worker exit

* Fix non-encapsulated test app

* Use test celery project

* Use solo pool to try and fix windows thread issues

* Disable capture to aid test debug
---
 celery/bin/worker.py                |  2 +-
 requirements/test.txt               |  1 +
 t/integration/test_worker.py        | 18 ++++++
 t/integration/test_worker_config.py | 12 ++++
 t/unit/app/test_app.py              |  4 +-
 t/unit/bin/proj/app.py              |  1 +
 t/unit/bin/test_worker.py           | 20 +++++++
 t/unit/contrib/test_worker.py       | 93 +++++++++++++----------------
 tox.ini                             |  2 +-
 9 files changed, 98 insertions(+), 55 deletions(-)
 create mode 100644 t/integration/test_worker.py
 create mode 100644 t/integration/test_worker_config.py
 create mode 100644 t/unit/bin/test_worker.py

diff --git a/celery/bin/worker.py b/celery/bin/worker.py
index b3fc91e986b..6a4b5533692 100644
--- a/celery/bin/worker.py
+++ b/celery/bin/worker.py
@@ -351,7 +351,7 @@ def worker(ctx, hostname=None, pool_cls=None, app=None, uid=None, gid=None,
                                 quiet=ctx.obj.quiet,
                                 **kwargs)
         worker.start()
-        return worker.exitcode
+        ctx.exit(worker.exitcode)
     except SecurityError as e:
         ctx.obj.error(e.args[0])
         ctx.exit(1)
diff --git a/requirements/test.txt b/requirements/test.txt
index bdeebe8c32f..b8d769caab3 100644
--- a/requirements/test.txt
+++ b/requirements/test.txt
@@ -2,6 +2,7 @@ pytest~=7.1.1
 pytest-celery
 pytest-subtests==0.8.0
 pytest-timeout~=2.1.0
+pytest-click
 boto3>=1.9.178
 moto>=2.2.6
 # typing extensions
diff --git a/t/integration/test_worker.py b/t/integration/test_worker.py
new file mode 100644
index 00000000000..9487753f4a5
--- /dev/null
+++ b/t/integration/test_worker.py
@@ -0,0 +1,18 @@
+import subprocess
+
+import pytest
+
+
+def test_run_worker():
+    with pytest.raises(subprocess.CalledProcessError) as exc_info:
+        subprocess.check_output(
+            ["celery", "--config", "t.integration.test_worker_config", "worker"],
+            stderr=subprocess.STDOUT)
+
+    called_process_error = exc_info.value
+    assert called_process_error.returncode == 1, called_process_error
+    output = called_process_error.output.decode('utf-8')
+    assert output.find(
+        "Retrying to establish a connection to the message broker after a connection "
+        "loss has been disabled (app.conf.broker_connection_retry_on_startup=False).
Shutting down...") != -1, output
diff --git a/t/integration/test_worker_config.py b/t/integration/test_worker_config.py
new file mode 100644
index 00000000000..d52109c3a41
--- /dev/null
+++ b/t/integration/test_worker_config.py
@@ -0,0 +1,12 @@
+# Test config for t/integration/test_worker.py
+
+broker_url = 'amqp://guest:guest@foobar:1234//'
+
+# Fail fast for test_run_worker
+broker_connection_retry_on_startup = False
+broker_connection_retry = False
+broker_connection_timeout = 0
+
+worker_log_color = False
+
+worker_redirect_stdouts = False
diff --git a/t/unit/app/test_app.py b/t/unit/app/test_app.py
index cb68b5b69ef..0402c3bc3fc 100644
--- a/t/unit/app/test_app.py
+++ b/t/unit/app/test_app.py
@@ -591,8 +591,8 @@ def test_worker_main(self, mocked_celery):
         mocked_celery.main.assert_called_with(
             args=['worker', '--help'], standalone_mode=False)

-    def test_config_from_envvar(self):
-        os.environ['CELERYTEST_CONFIG_OBJECT'] = 't.unit.app.test_app'
+    def test_config_from_envvar(self, monkeypatch):
+        monkeypatch.setenv("CELERYTEST_CONFIG_OBJECT", 't.unit.app.test_app')
         self.app.config_from_envvar('CELERYTEST_CONFIG_OBJECT')
         assert self.app.conf.THIS_IS_A_KEY == 'this is a value'

diff --git a/t/unit/bin/proj/app.py b/t/unit/bin/proj/app.py
index 95c460c5777..f8762238236 100644
--- a/t/unit/bin/proj/app.py
+++ b/t/unit/bin/proj/app.py
@@ -1,3 +1,4 @@
 from celery import Celery

 app = Celery(set_as_current=False)
+app.config_from_object("t.integration.test_worker_config")
diff --git a/t/unit/bin/test_worker.py b/t/unit/bin/test_worker.py
new file mode 100644
index 00000000000..50a07e3b674
--- /dev/null
+++ b/t/unit/bin/test_worker.py
@@ -0,0 +1,20 @@
+import pytest
+from click.testing import CliRunner
+
+from celery.app.log import Logging
+from celery.bin.celery import celery
+
+
+@pytest.fixture(scope='session')
+def use_celery_app_trap():
+    return False
+
+
+def test_cli(isolated_cli_runner: CliRunner):
+    Logging._setup = True  # To avoid hitting the logging sanity checks
+    res = isolated_cli_runner.invoke(
+        celery,
+        ["-A", "t.unit.bin.proj.app", "worker", "--pool", "solo"],
+        catch_exceptions=False
+    )
+    assert res.exit_code == 1, (res, res.stdout)
diff --git a/t/unit/contrib/test_worker.py b/t/unit/contrib/test_worker.py
index ad4efdb5529..f2ccf0625bd 100644
--- a/t/unit/contrib/test_worker.py
+++ b/t/unit/contrib/test_worker.py
@@ -1,56 +1,47 @@
 import pytest

+# this import adds a @shared_task, which uses connect_on_app_finalize
+# to install the celery.ping task that the test lib uses
+import celery.contrib.testing.tasks  # noqa: F401
 from celery import Celery
 from celery.contrib.testing.worker import start_worker

-app = Celery('celerytest',
-             backend='cache+memory://',
-             broker='memory://',
-             )
-
-
-@app.task
-def add(x, y):
-    return x + y
-
-
-def test_start_worker():
-    app.config_from_object({
-        'worker_hijack_root_logger': False,
-    })
-    # this import adds a @shared_task, which uses connect_on_app_finalize
-    # to install the celery.ping task that the test lib uses
-    import celery.contrib.testing.tasks  # noqa: F401
-
-    # to avoid changing the root logger level to ERROR,
-    # we have to set both app.log.loglevel and the start_worker loglevel arg to 0
-    # (see celery.app.log.setup_logging_subsystem)
-    app.log.loglevel = 0
-    with start_worker(app=app, loglevel=0):
-        result = add.s(1, 2).apply_async()
-        val = result.get(timeout=5)
-        assert val == 3
-
-
-@app.task
-def error_task():
-    raise NotImplementedError()
-
-
-def test_start_worker_with_exception():
-    """Make sure that start_worker does not
hang on exception""" - app.config_from_object({ - 'worker_hijack_root_logger': False, - }) - # this import adds a @shared_task, which uses connect_on_app_finalize - # to install the celery.ping task that the test lib uses - import celery.contrib.testing.tasks # noqa: F401 - - # to avoid changing the root logger level to ERROR, - # we have we have to set both app.log.loglevel start_worker arg to 0 - # (see celery.app.log.setup_logging_subsystem) - app.log.loglevel = 0 - with pytest.raises(NotImplementedError): - with start_worker(app=app, loglevel=0): - result = error_task.apply_async() - result.get(timeout=5) + +class test_worker: + def setup(self): + self.app = Celery('celerytest', backend='cache+memory://', broker='memory://',) + + @self.app.task + def add(x, y): + return x + y + + self.add = add + + @self.app.task + def error_task(): + raise NotImplementedError() + + self.error_task = error_task + + self.app.config_from_object({ + 'worker_hijack_root_logger': False, + }) + + # to avoid changing the root logger level to ERROR, + # we have we have to set both app.log.loglevel start_worker arg to 0 + # (see celery.app.log.setup_logging_subsystem) + self.app.log.loglevel = 0 + + def test_start_worker(self): + with start_worker(app=self.app, loglevel=0): + result = self.add.s(1, 2).apply_async() + val = result.get(timeout=5) + assert val == 3 + + def test_start_worker_with_exception(self): + """Make sure that start_worker does not hang on exception""" + + with pytest.raises(NotImplementedError): + with start_worker(app=self.app, loglevel=0): + result = self.error_task.apply_async() + result.get(timeout=5) diff --git a/tox.ini b/tox.ini index b9901ca35d3..bb456a64e8f 100644 --- a/tox.ini +++ b/tox.ini @@ -41,7 +41,7 @@ deps= bandit: bandit commands = - unit: pytest --maxfail=10 -v --cov=celery --cov-report=xml --cov-report term {posargs} + unit: pytest --maxfail=10 --capture=no -v --cov=celery --cov-report=xml --cov-report term {posargs} integration: pytest -xsv t/integration {posargs} setenv = PIP_EXTRA_INDEX_URL=https://celery.github.io/celery-wheelhouse/repo/simple/ From 366a1f77d7fbb40f880e8c968ed1591430329fe3 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Mon, 6 Jun 2022 00:15:16 +0600 Subject: [PATCH 0122/1051] boto3>=1.22.2 (#7496) --- requirements/extras/dynamodb.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/dynamodb.txt b/requirements/extras/dynamodb.txt index 30e5f8e0f2b..f52faa35c3a 100644 --- a/requirements/extras/dynamodb.txt +++ b/requirements/extras/dynamodb.txt @@ -1 +1 @@ -boto3>=1.9.178 +boto3>=1.22.2 From f232ae0450e4cb7a61f7cddc55d9775e14ab12d9 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Mon, 6 Jun 2022 00:15:49 +0600 Subject: [PATCH 0123/1051] cassandra-driver>=3.25.0,<4 (#7495) --- requirements/extras/cassandra.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/cassandra.txt b/requirements/extras/cassandra.txt index b84a7360ace..2c2f27308fb 100644 --- a/requirements/extras/cassandra.txt +++ b/requirements/extras/cassandra.txt @@ -1 +1 @@ -cassandra-driver>=3.24.0,<4 +cassandra-driver>=3.25.0,<4 From 9f891ee7b436875b699bf89956cf7ff724ae62e8 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Mon, 6 Jun 2022 00:16:07 +0600 Subject: [PATCH 0124/1051] pyArango>=2.0.1 (#7491) --- requirements/extras/arangodb.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/arangodb.txt b/requirements/extras/arangodb.txt index 1a6b85f1294..f081bacacfe 
100644
--- a/requirements/extras/arangodb.txt
+++ b/requirements/extras/arangodb.txt
@@ -1 +1 @@
-pyArango>=1.3.2
\ No newline at end of file
+pyArango>=2.0.1

From 13bd136871d1954a56b5f4300bfdfac396070b1c Mon Sep 17 00:00:00 2001
From: dobosevych
Date: Tue, 7 Jun 2022 11:00:08 +0300
Subject: [PATCH 0125/1051] Fix expiration check (#7552)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* Fixed celery task expiration check: expires value can be ISO8601 for task retry

* Fixed tests and migrated them to another branch

* Fixed tests and migrated them to another branch

Co-authored-by: Luboš Mátl
---
 celery/app/base.py               |  3 ++-
 requirements/test-ci-default.txt |  1 +
 t/unit/tasks/test_tasks.py       | 11 +++++++++++
 3 files changed, 14 insertions(+), 1 deletion(-)

diff --git a/celery/app/base.py b/celery/app/base.py
index cf2a3ac3671..842e3416070 100644
--- a/celery/app/base.py
+++ b/celery/app/base.py
@@ -733,7 +733,8 @@ def send_task(self, name, args=None, kwargs=None, countdown=None,
             options, route_name or name, args, kwargs, task_type)
         if expires is not None:
             if isinstance(expires, datetime):
-                expires_s = (maybe_make_aware(expires) - self.now()).total_seconds()
+                expires_s = (maybe_make_aware(
+                    expires) - self.now()).total_seconds()
             else:
                 expires_s = expires

diff --git a/requirements/test-ci-default.txt b/requirements/test-ci-default.txt
index 953ed9aecc7..e6a3014cf7f 100644
--- a/requirements/test-ci-default.txt
+++ b/requirements/test-ci-default.txt
@@ -18,6 +18,7 @@
 -r extras/cosmosdbsql.txt
 -r extras/cassandra.txt
 -r extras/azureblockblob.txt
+git+https://github.com/celery/kombu.git

 # SQS dependencies other than boto
 pycurl==7.43.0.5 # Latest version with wheel built (for appveyor)
diff --git a/t/unit/tasks/test_tasks.py b/t/unit/tasks/test_tasks.py
index 89689914f26..ebfe89e1ad7 100644
--- a/t/unit/tasks/test_tasks.py
+++ b/t/unit/tasks/test_tasks.py
@@ -983,6 +983,17 @@ def test_regular_task(self):
             name='George Costanza', test_eta=True, test_expires=True,
         )

+        # With ETA, absolute expires in the past in ISO format.
+        presult2 = self.mytask.apply_async(
+            kwargs={'name': 'George Costanza'},
+            eta=self.now() + timedelta(days=1),
+            expires=self.now() - timedelta(days=2),
+        )
+        self.assert_next_task_data_equal(
+            consumer, presult2, self.mytask.name,
+            name='George Costanza', test_eta=True, test_expires=True,
+        )
+
         # Default argsrepr/kwargsrepr behavior
         presult2 = self.mytask.apply_async(
             args=('spam',), kwargs={'name': 'Jerry Seinfeld'}

From 0a783edd229783d834caa2a9dd8c79647a391cbd Mon Sep 17 00:00:00 2001
From: Gabriel Soldani <1268700+gabrielsoldani@users.noreply.github.com>
Date: Tue, 7 Jun 2022 09:25:26 -0300
Subject: [PATCH 0126/1051] Use `callable` built-in

Closes #3964.
---
 celery/utils/functional.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/celery/utils/functional.py b/celery/utils/functional.py
index bcc15a3c788..9402a123658 100644
--- a/celery/utils/functional.py
+++ b/celery/utils/functional.py
@@ -311,7 +311,7 @@ def head_from_fun(fun, bound=False, debug=False):
     # with an empty body, meaning it has the same performance as
     # just calling a function.
is_function = inspect.isfunction(fun)
-    is_callable = hasattr(fun, '__call__')
+    is_callable = callable(fun)
     is_cython = fun.__class__.__name__ == 'cython_function_or_method'
     is_method = inspect.ismethod(fun)

From 45a553c9bea6b5679c137f7e4c4372c280184166 Mon Sep 17 00:00:00 2001
From: dobosevych
Date: Thu, 9 Jun 2022 15:20:03 +0300
Subject: [PATCH 0127/1051] Include `dont_autoretry_for` option in tasks.
 (#7556)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* Include `dont_autoretry_for` option in tasks.

* Fixed issue with failing tests

* Fixed flake8 issue

* Change version added

Co-authored-by: Manuel Vázquez Acosta
---
 celery/app/autoretry.py    |  6 ++++++
 docs/userguide/tasks.rst   |  9 +++++++++
 t/unit/tasks/test_tasks.py | 24 ++++++++++++++++++++++++
 3 files changed, 39 insertions(+)

diff --git a/celery/app/autoretry.py b/celery/app/autoretry.py
index a5fe700b650..15747e5173f 100644
--- a/celery/app/autoretry.py
+++ b/celery/app/autoretry.py
@@ -11,6 +11,10 @@ def add_autoretry_behaviour(task, **options):
         options.get('autoretry_for',
                     getattr(task, 'autoretry_for', ()))
     )
+    dont_autoretry_for = tuple(
+        options.get('dont_autoretry_for',
+                    getattr(task, 'dont_autoretry_for', ()))
+    )
     retry_kwargs = options.get(
         'retry_kwargs', getattr(task, 'retry_kwargs', {})
     )
@@ -38,6 +42,8 @@ def run(*args, **kwargs):
                 raise
             except Retry:
                 raise
+            except dont_autoretry_for:
+                raise
             except autoretry_for as exc:
                 if retry_backoff:
                     retry_kwargs['countdown'] = \
diff --git a/docs/userguide/tasks.rst b/docs/userguide/tasks.rst
index f3ae3366125..f41b53e61ec 100644
--- a/docs/userguide/tasks.rst
+++ b/docs/userguide/tasks.rst
@@ -787,6 +787,15 @@ You can also set `autoretry_for`, `max_retries`, `retry_backoff`, `retry_backoff
    and the actual delay value will be a random number between zero and that
    maximum. By default, this option is set to ``True``.

+.. versionadded:: 5.3.0
+
+.. attribute:: Task.dont_autoretry_for
+
+   A list/tuple of exception classes. These exceptions won't be autoretried.
+   This lets you exclude some exceptions that match :attr:`autoretry_for`
+   but for which you don't want a retry.
+
+
 .. 
_task-options: List of Options diff --git a/t/unit/tasks/test_tasks.py b/t/unit/tasks/test_tasks.py index ebfe89e1ad7..e23bc4a091f 100644 --- a/t/unit/tasks/test_tasks.py +++ b/t/unit/tasks/test_tasks.py @@ -48,6 +48,15 @@ class TaskWithRetry(Task): retry_jitter = False +class TaskWithRetryButForTypeError(Task): + autoretry_for = (Exception,) + dont_autoretry_for = (TypeError,) + retry_kwargs = {'max_retries': 5} + retry_backoff = True + retry_backoff_max = 700 + retry_jitter = False + + class TasksCase: def setup(self): @@ -222,6 +231,15 @@ def autoretry_task(self, a, b): self.autoretry_task = autoretry_task + @self.app.task(bind=True, autoretry_for=(ArithmeticError,), + dont_autoretry_for=(ZeroDivisionError,), + retry_kwargs={'max_retries': 5}, shared=False) + def autoretry_arith_task(self, a, b): + self.iterations += 1 + return a / b + + self.autoretry_arith_task = autoretry_arith_task + @self.app.task(bind=True, autoretry_for=(HTTPError,), retry_backoff=True, shared=False) def autoretry_backoff_task(self, url): @@ -561,6 +579,12 @@ def test_autoretry(self): self.autoretry_task.apply((1, 0)) assert self.autoretry_task.iterations == 6 + def test_autoretry_arith(self): + self.autoretry_arith_task.max_retries = 3 + self.autoretry_arith_task.iterations = 0 + self.autoretry_arith_task.apply((1, 0)) + assert self.autoretry_arith_task.iterations == 1 + @patch('random.randrange', side_effect=lambda i: i - 1) def test_autoretry_backoff(self, randrange): task = self.autoretry_backoff_task From 892dd8fb732cc711ceacd90a8dca05ff02c79aa7 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Thu, 9 Jun 2022 19:24:12 +0600 Subject: [PATCH 0128/1051] only pull requests and some other updates (#7559) * only pull requests and some other updates * Update lint_python.yml --- .github/workflows/lint_python.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/lint_python.yml b/.github/workflows/lint_python.yml index eafb4dfdff3..e434e9596e2 100644 --- a/.github/workflows/lint_python.yml +++ b/.github/workflows/lint_python.yml @@ -1,14 +1,14 @@ -name: lint_python -on: [pull_request, push] +name: lint Python +on: [pull_request] jobs: lint_python: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 - - uses: actions/setup-python@v2 - - uses: pre-commit/action@v2.0.3 + - uses: actions/checkout@v3 + - uses: actions/setup-python@v3 + - uses: pre-commit/action@v3.0.0 - run: pip install --upgrade pip wheel - - run: pip install bandit codespell flake8 isort pytest pyupgrade tox + - run: pip install -U bandit codespell flake8 isort pytest pyupgrade tox - name: bandit run: bandit -r . 
|| true From 6b886b6f00f304371f29f071201218b229a3ae6a Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Thu, 9 Jun 2022 19:42:56 +0600 Subject: [PATCH 0129/1051] setup-python v4 (#7558) --- .github/workflows/python-package.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 4d2da0ecd66..60385d03a27 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -40,7 +40,7 @@ jobs: sudo apt update && sudo apt-get install -f libcurl4-openssl-dev libssl-dev libgnutls28-dev httping expect libmemcached-dev - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} @@ -107,7 +107,7 @@ jobs: sudo apt update && sudo apt-get install -f libcurl4-openssl-dev libssl-dev libgnutls28-dev httping expect libmemcached-dev - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} From 04ceb12c20248b07cc1b0d2a436e53984d1bb2e5 Mon Sep 17 00:00:00 2001 From: aquiline Date: Wed, 8 Jun 2022 22:22:18 +0530 Subject: [PATCH 0130/1051] fix: Syntax error in arango query Add missing closing bracket for the DOCUMENT function --- celery/backends/arangodb.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/celery/backends/arangodb.py b/celery/backends/arangodb.py index a7575741575..d456d0fc4da 100644 --- a/celery/backends/arangodb.py +++ b/celery/backends/arangodb.py @@ -168,7 +168,7 @@ def mget(self, keys): logging.debug( """ FOR key in {keys} - RETURN DOCUMENT(CONCAT("{collection}/", key).task + RETURN DOCUMENT(CONCAT("{collection}/", key)).task """.format( collection=self.collection, keys=json_keys ) @@ -176,7 +176,7 @@ def mget(self, keys): query = self.db.AQLQuery( """ FOR key in {keys} - RETURN DOCUMENT(CONCAT("{collection}/", key).task + RETURN DOCUMENT(CONCAT("{collection}/", key)).task """.format( collection=self.collection, keys=json_keys ) From 353c9f314e2318ececaf36428381976bf0db63d1 Mon Sep 17 00:00:00 2001 From: Oleg Romanovskyi Date: Fri, 10 Jun 2022 09:26:31 +0300 Subject: [PATCH 0131/1051] Fix custom headers propagation on task retries (#7555) * Fix custom headers propagation on task retries * Add unit tests for `Context` custom headers --- CONTRIBUTORS.txt | 3 ++- celery/app/task.py | 12 ++++++++++++ t/integration/tasks.py | 10 ++++++++++ t/integration/test_tasks.py | 10 +++++++++- t/unit/tasks/test_context.py | 21 +++++++++++++++++++++ 5 files changed, 54 insertions(+), 2 deletions(-) diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index fc6b8d4b874..d847bb2492d 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -286,4 +286,5 @@ Patrick Zhang, 2017/08/19 Konstantin Kochin, 2021/07/11 kronion, 2021/08/26 Gabor Boros, 2021/11/09 -Tizian Seehaus, 2022/02/09 \ No newline at end of file +Tizian Seehaus, 2022/02/09 +Oleh Romanovskyi, 2022/06/09 diff --git a/celery/app/task.py b/celery/app/task.py index f5a653e278a..b594f063ddf 100644 --- a/celery/app/task.py +++ b/celery/app/task.py @@ -96,6 +96,18 @@ class Context: def __init__(self, *args, **kwargs): self.update(*args, **kwargs) + if self.headers is None: + self.headers = self._get_custom_headers(*args, **kwargs) + + def _get_custom_headers(self, *args, **kwargs): + headers = {} + headers.update(*args, **kwargs) + celery_keys 
= {*Context.__dict__.keys(), 'lang', 'task', 'argsrepr', 'kwargsrepr'} + for key in celery_keys: + headers.pop(key, None) + if not headers: + return None + return headers def update(self, *args, **kwargs): return self.__dict__.update(*args, **kwargs) diff --git a/t/integration/tasks.py b/t/integration/tasks.py index 1e2b8047bd7..761c4a48980 100644 --- a/t/integration/tasks.py +++ b/t/integration/tasks.py @@ -217,6 +217,16 @@ def retry_once_priority(self, *args, expires=60.0, max_retries=1, max_retries=max_retries) +@shared_task(bind=True, max_retries=1) +def retry_once_headers(self, *args, max_retries=1, + countdown=0.1): + """Task that fails and is retried. Returns headers.""" + if self.request.retries: + return self.request.headers + raise self.retry(countdown=countdown, + max_retries=max_retries) + + @shared_task def redis_echo(message, redis_key="redis-echo"): """Task that appends the message to a redis list.""" diff --git a/t/integration/test_tasks.py b/t/integration/test_tasks.py index a7ee94ee40d..198881b891c 100644 --- a/t/integration/test_tasks.py +++ b/t/integration/test_tasks.py @@ -8,7 +8,8 @@ from .conftest import get_active_redis_channels from .tasks import (ClassBasedAutoRetryTask, ExpectedException, add, add_ignore_result, add_not_typed, fail, - print_unicode, retry, retry_once, retry_once_priority, return_properties, sleeping) + print_unicode, retry, retry_once, retry_once_headers, retry_once_priority, return_properties, + sleeping) TIMEOUT = 10 @@ -267,6 +268,13 @@ def test_task_retried_priority(self, manager): res = retry_once_priority.apply_async(priority=7) assert res.get(timeout=TIMEOUT) == 7 # retried once with priority 7 + @flaky + def test_task_retried_headers(self, manager): + res = retry_once_headers.apply_async(headers={'x-test-header': 'test-value'}) + headers = res.get(timeout=TIMEOUT) + assert headers is not None # retried once with headers + assert 'x-test-header' in headers # retry keeps custom headers + @flaky def test_unicode_task(self, manager): manager.join( diff --git a/t/unit/tasks/test_context.py b/t/unit/tasks/test_context.py index 53d79466b2d..0af40515375 100644 --- a/t/unit/tasks/test_context.py +++ b/t/unit/tasks/test_context.py @@ -63,3 +63,24 @@ def test_context_get(self): ctx_dict = get_context_as_dict(ctx, getter=Context.get) assert ctx_dict == expected assert get_context_as_dict(Context()) == default_context + + def test_extract_headers(self): + # Should extract custom headers from the request dict + request = { + 'task': 'test.test_task', + 'id': 'e16eeaee-1172-49bb-9098-5437a509ffd9', + 'custom-header': 'custom-value', + } + ctx = Context(request) + assert ctx.headers == {'custom-header': 'custom-value'} + + def test_dont_override_headers(self): + # Should not override headers if defined in the request + request = { + 'task': 'test.test_task', + 'id': 'e16eeaee-1172-49bb-9098-5437a509ffd9', + 'headers': {'custom-header': 'custom-value'}, + 'custom-header-2': 'custom-value-2', + } + ctx = Context(request) + assert ctx.headers == {'custom-header': 'custom-value'} From 1c7fa002b1fc601694c28d582e06cd9370bef54b Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 13 Jun 2022 16:56:46 +0000 Subject: [PATCH 0132/1051] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/asottile/pyupgrade: v2.32.1 → v2.34.0](https://github.com/asottile/pyupgrade/compare/v2.32.1...v2.34.0) - 
[github.com/pre-commit/pre-commit-hooks: v4.2.0 → v4.3.0](https://github.com/pre-commit/pre-commit-hooks/compare/v4.2.0...v4.3.0) --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f70b1e4d643..1cac64fbef2 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/asottile/pyupgrade - rev: v2.32.1 + rev: v2.34.0 hooks: - id: pyupgrade args: ["--py37-plus"] @@ -16,7 +16,7 @@ repos: - id: yesqa - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.2.0 + rev: v4.3.0 hooks: - id: check-merge-conflict - id: check-toml From ceb0af747053e9ea1e33dff643a2045364ffcbce Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Tue, 14 Jun 2022 22:13:22 +0600 Subject: [PATCH 0133/1051] billiard & other update (#7489) * billiard~=4.0.0 * billiard>=4.0.0,5.0 * update * billiard==4.0.0 * billiard>=3.6.4.0,<5.0 --- requirements/default.txt | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/default.txt b/requirements/default.txt index 0203186c858..23605ce2c65 100644 --- a/requirements/default.txt +++ b/requirements/default.txt @@ -1,9 +1,9 @@ -pytz>=2021.3 -billiard>=3.6.4.0,<4.0 +pytz>=2022.1 +billiard>=3.6.4.0,<5.0 kombu>=5.2.3,<6.0 vine>=5.0.0,<6.0 -click>=8.0.3,<9.0 -click-didyoumean>=0.0.3 +click>=8.1.2,<9.0 +click-didyoumean>=0.3.0 click-repl>=0.2.0 click-plugins>=1.1.1 importlib-metadata>=1.4.0; python_version < '3.8' From bbce40c732ca3b783444f56a9b4a02c06e054642 Mon Sep 17 00:00:00 2001 From: "pyup.io bot" Date: Tue, 14 Jun 2022 11:15:34 -0500 Subject: [PATCH 0134/1051] Scheduled weekly dependency update for week 24 (#7566) * Pin billiard to latest version 4.0.0 * Pin sphinx-click to latest version 4.1.0 * Pin pytest-celery to latest version 0.0.0 * Pin pytest-celery to latest version 0.0.0 * Pin pytest-click to latest version 1.1.0 * Pin mypy to latest version 0.961 * Update pre-commit from 2.18.1 to 2.19.0 * Update msgpack from 1.0.3 to 1.0.4 * Pin python-consul2 to latest version 0.1.5 * Update pycouchdb from 1.14.1 to 1.14.2 * Pin elasticsearch to latest version 8.2.2 * Pin zstandard to latest version 0.17.0 * Pin pydocstyle to latest version 6.1.1 * Pin bumpversion to latest version 0.6.0 * Pin pytest-cov to latest version 3.0.0 * Pin pytest-github-actions-annotate-failures to latest version 0.1.6 * Update pycurl from 7.43.0.5 to 7.45.1 * pin ElS * pycurl==7.43.0.5 * billiard>=3.6.4.0,<5.0 Co-authored-by: Asif Saif Uddin --- requirements/default.txt | 2 +- requirements/docs.txt | 2 +- requirements/extras/consul.txt | 2 +- requirements/extras/couchdb.txt | 2 +- requirements/extras/msgpack.txt | 2 +- requirements/extras/pytest.txt | 2 +- requirements/extras/zstd.txt | 2 +- requirements/pkgutils.txt | 4 ++-- requirements/test-ci-base.txt | 4 ++-- requirements/test-ci-default.txt | 2 +- requirements/test.txt | 8 ++++---- 11 files changed, 16 insertions(+), 16 deletions(-) diff --git a/requirements/default.txt b/requirements/default.txt index 23605ce2c65..0f7d1d4941d 100644 --- a/requirements/default.txt +++ b/requirements/default.txt @@ -1,4 +1,4 @@ -pytz>=2022.1 +pytz>=2021.3 billiard>=3.6.4.0,<5.0 kombu>=5.2.3,<6.0 vine>=5.0.0,<6.0 diff --git a/requirements/docs.txt b/requirements/docs.txt index 9a268ca733b..5a7f66c9d06 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -1,7 +1,7 @@ sphinx_celery~=2.0.0 Sphinx>=3.0.0 sphinx-testing~=1.0.1 -sphinx-click~=4.0.3 +sphinx-click==4.1.0 -r 
extras/sqlalchemy.txt -r test.txt -r deps/mock.txt diff --git a/requirements/extras/consul.txt b/requirements/extras/consul.txt index 7b85dde7b66..19ca97b0d46 100644 --- a/requirements/extras/consul.txt +++ b/requirements/extras/consul.txt @@ -1 +1 @@ -python-consul2 +python-consul2==0.1.5 diff --git a/requirements/extras/couchdb.txt b/requirements/extras/couchdb.txt index 0e21a4ff6b6..3942c0d775f 100644 --- a/requirements/extras/couchdb.txt +++ b/requirements/extras/couchdb.txt @@ -1 +1 @@ -pycouchdb==1.14.1 +pycouchdb==1.14.2 diff --git a/requirements/extras/msgpack.txt b/requirements/extras/msgpack.txt index ea1047efad5..f912067dd44 100644 --- a/requirements/extras/msgpack.txt +++ b/requirements/extras/msgpack.txt @@ -1 +1 @@ -msgpack==1.0.3 +msgpack==1.0.4 diff --git a/requirements/extras/pytest.txt b/requirements/extras/pytest.txt index 8e0e5f3471b..6daa4ff1249 100644 --- a/requirements/extras/pytest.txt +++ b/requirements/extras/pytest.txt @@ -1 +1 @@ -pytest-celery +pytest-celery==0.0.0 diff --git a/requirements/extras/zstd.txt b/requirements/extras/zstd.txt index 864700d2b3e..9f5bc8a143b 100644 --- a/requirements/extras/zstd.txt +++ b/requirements/extras/zstd.txt @@ -1 +1 @@ -zstandard +zstandard==0.17.0 diff --git a/requirements/pkgutils.txt b/requirements/pkgutils.txt index ea4078d78b4..abe74e0ef86 100644 --- a/requirements/pkgutils.txt +++ b/requirements/pkgutils.txt @@ -3,9 +3,9 @@ wheel>=0.33.1 flake8>=3.8.3 flakeplus>=1.1 flake8-docstrings~=1.5 -pydocstyle~=5.0; python_version >= '3.0' +pydocstyle==6.1.1; python_version >= '3.0' tox>=3.8.4 sphinx2rst>=1.0 # Disable cyanide until it's fully updated. # cyanide>=1.0.1 -bumpversion +bumpversion==0.6.0 diff --git a/requirements/test-ci-base.txt b/requirements/test-ci-base.txt index 63a15706a7c..23316a0aec1 100644 --- a/requirements/test-ci-base.txt +++ b/requirements/test-ci-base.txt @@ -1,5 +1,5 @@ -pytest-cov -pytest-github-actions-annotate-failures +pytest-cov==3.0.0 +pytest-github-actions-annotate-failures==0.1.6 codecov==2.1.12 -r extras/redis.txt -r extras/sqlalchemy.txt diff --git a/requirements/test-ci-default.txt b/requirements/test-ci-default.txt index e6a3014cf7f..93141b96175 100644 --- a/requirements/test-ci-default.txt +++ b/requirements/test-ci-default.txt @@ -21,4 +21,4 @@ git+https://github.com/celery/kombu.git # SQS dependencies other than boto -pycurl==7.43.0.5 # Latest version with wheel built (for appveyor) +pycurl==7.43.0.5 diff --git a/requirements/test.txt b/requirements/test.txt index b8d769caab3..66109b1c1c1 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,12 +1,12 @@ pytest~=7.1.1 -pytest-celery +pytest-celery==0.0.0 pytest-subtests==0.8.0 pytest-timeout~=2.1.0 -pytest-click +pytest-click==1.1.0 boto3>=1.9.178 moto>=2.2.6 # typing extensions -mypy; platform_python_implementation=="CPython" -pre-commit==2.18.1 +mypy==0.961; platform_python_implementation=="CPython" +pre-commit==2.19.0 -r extras/yaml.txt -r extras/msgpack.txt From 5acc7f39b228fd6bb74b9219e71c87f35e641423 Mon Sep 17 00:00:00 2001 From: Atiab Bin Zakaria <61742543+atiabbz@users.noreply.github.com> Date: Wed, 15 Jun 2022 15:08:45 +0800 Subject: [PATCH 0135/1051] docs: assorted fixes (#7572) * docs: ensure consistency in `Backends and Brokers` descriptions * docs: replace semicolon with colon in `First Steps with Celery` * docs: replace backtick with apostrophe in `Frequently Asked Questions` --- docs/faq.rst | 2 +- docs/getting-started/backends-and-brokers/index.rst | 2 +- docs/getting-started/first-steps-with-celery.rst | 
2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/faq.rst b/docs/faq.rst index 1b11c1840f0..29cd77900bd 100644 --- a/docs/faq.rst +++ b/docs/faq.rst @@ -220,7 +220,7 @@ You can do that by adding the following to your :file:`my.cnf`:: [mysqld] transaction-isolation = READ-COMMITTED -For more information about InnoDB`s transaction model see `MySQL - The InnoDB +For more information about InnoDB’s transaction model see `MySQL - The InnoDB Transaction Model and Locking`_ in the MySQL user manual. (Thanks to Honza Kral and Anton Tsigularov for this solution) diff --git a/docs/getting-started/backends-and-brokers/index.rst b/docs/getting-started/backends-and-brokers/index.rst index d50b0b5e526..6b0c35e2d8b 100644 --- a/docs/getting-started/backends-and-brokers/index.rst +++ b/docs/getting-started/backends-and-brokers/index.rst @@ -96,6 +96,6 @@ If you already integrate tightly with AWS, and are familiar with SQS, it present SQLAlchemy ---------- -SQLAlchemy is backend. +SQLAlchemy is a backend. It allows Celery to interface with MySQL, PostgreSQL, SQlite, and more. It is a ORM, and is the way Celery can use a SQL DB as a result backend. Historically, SQLAlchemy has not been the most stable result backend so if chosen one should proceed with caution. diff --git a/docs/getting-started/first-steps-with-celery.rst b/docs/getting-started/first-steps-with-celery.rst index a87af8f7201..12222e5c223 100644 --- a/docs/getting-started/first-steps-with-celery.rst +++ b/docs/getting-started/first-steps-with-celery.rst @@ -14,7 +14,7 @@ tools and support you need to run such a system in production. In this tutorial you'll learn the absolute basics of using Celery. -Learn about; +Learn about: - Choosing and installing a message transport (broker). - Installing Celery and creating your first task. From 4548da720e750720736fcfaf39df601cfb666350 Mon Sep 17 00:00:00 2001 From: EricAtORS Date: Wed, 15 Jun 2022 08:14:47 -0700 Subject: [PATCH 0136/1051] Fix order of arguments for clarity (#7543) --- docs/userguide/canvas.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/userguide/canvas.rst b/docs/userguide/canvas.rst index c0dc9ae13c8..e6b4014ea11 100644 --- a/docs/userguide/canvas.rst +++ b/docs/userguide/canvas.rst @@ -244,7 +244,7 @@ arguments: >>> add.apply_async((2, 2), link=add.s(8)) As expected this will first launch one task calculating :math:`2 + 2`, then -another task calculating :math:`4 + 8`. +another task calculating :math:`8 + 4`. The Primitives ============== From 4b7986ff58135c228763bd7e6016cca4b8c3d1b0 Mon Sep 17 00:00:00 2001 From: Javadz Date: Fri, 17 Jun 2022 10:01:21 +0430 Subject: [PATCH 0137/1051] Removed Flower monitor screenshot --- docs/userguide/monitoring.rst | 3 --- 1 file changed, 3 deletions(-) diff --git a/docs/userguide/monitoring.rst b/docs/userguide/monitoring.rst index 725f264057f..9a55dccc5c7 100644 --- a/docs/userguide/monitoring.rst +++ b/docs/userguide/monitoring.rst @@ -266,9 +266,6 @@ Features .. figure:: ../images/dashboard.png :width: 700px -.. figure:: ../images/monitor.png - :width: 700px - More screenshots_: .. 
 .. _screenshots: https://github.com/mher/flower/tree/master/docs/screenshots

From c955080aab64a09727f52a8438949c76995c703b Mon Sep 17 00:00:00 2001
From: "pyup.io bot"
Date: Mon, 20 Jun 2022 21:27:21 -0500
Subject: [PATCH 0138/1051] Scheduled weekly dependency update for week 25
 (#7589)

* Update sphinx-click from 4.1.0 to 4.2.0

* Pin elasticsearch to latest version 8.2.3

* Update pycurl from 7.43.0.5 to 7.45.1

* pycurl==7.43.0.5

* elasticsearch<8.0

Co-authored-by: Asif Saif Uddin
---
 requirements/docs.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements/docs.txt b/requirements/docs.txt
index 5a7f66c9d06..f6e6432f103 100644
--- a/requirements/docs.txt
+++ b/requirements/docs.txt
@@ -1,7 +1,7 @@
 sphinx_celery~=2.0.0
 Sphinx>=3.0.0
 sphinx-testing~=1.0.1
-sphinx-click==4.1.0
+sphinx-click==4.2.0
 -r extras/sqlalchemy.txt
 -r test.txt
 -r deps/mock.txt

From e975830e82b889d3a0e90af5697be9c4e7790e6d Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Tue, 21 Jun 2022 08:32:09 +0600
Subject: [PATCH 0139/1051] restructure codeql ci

---
 .github/workflows/codeql-analysis.yml | 9 ++++-----
 1 file changed, 4 insertions(+), 5 deletions(-)

diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
index 9f948a98cf9..4d311d5c529 100644
--- a/.github/workflows/codeql-analysis.yml
+++ b/.github/workflows/codeql-analysis.yml
@@ -17,8 +17,7 @@ on:
   pull_request:
     # The branches below must be a subset of the branches above
     branches: [ master ]
-  schedule:
-    - cron: '18 4 * * 2'
+

 jobs:
   analyze:
@@ -38,11 +37,11 @@ jobs:

     steps:
     - name: Checkout repository
-      uses: actions/checkout@v2
+      uses: actions/checkout@v3

     # Initializes the CodeQL tools for scanning.
     - name: Initialize CodeQL
-      uses: github/codeql-action/init@v1
+      uses: github/codeql-action/init@v2
       with:
         languages: ${{ matrix.language }}
         # If you wish to specify custom queries, you can do so here or in a config file.
@@ -67,4 +66,4 @@ jobs:
     #    make release

     - name: Perform CodeQL Analysis
-      uses: github/codeql-action/analyze@v1
+      uses: github/codeql-action/analyze@v2

From 25b0e3a6883f832144d25eb1c11a35e786408703 Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Tue, 21 Jun 2022 08:32:42 +0600
Subject: [PATCH 0140/1051] github/codeql-action/autobuild@v2

---
 .github/workflows/codeql-analysis.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
index 4d311d5c529..66b08d30051 100644
--- a/.github/workflows/codeql-analysis.yml
+++ b/.github/workflows/codeql-analysis.yml
@@ -52,7 +52,7 @@ jobs:
     # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
     # If this step fails, then you should remove it and run the build manually (see below)
     - name: Autobuild
-      uses: github/codeql-action/autobuild@v1
+      uses: github/codeql-action/autobuild@v2

     # ℹ️ Command-line programs to run using the OS shell.
     # 📚 https://git.io/JvXDl

From 7fdf0ba11f1e158e61108fd68b1c9d2f0846f107 Mon Sep 17 00:00:00 2001
From: kwikwag
Date: Thu, 27 Jan 2022 23:49:04 +0200
Subject: [PATCH 0141/1051] Silence backend warning when eager results are
 stored

When task_always_eager is enabled, the backend issues a warning when
trying to get task data. This suppresses that warning when
task_store_eager_result is enabled as well.
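
The setting combination described here can be reproduced in a few lines (a rough
sketch; the in-memory broker/backend URLs and the task are illustrative, not part
of the patch):

    from celery import Celery

    app = Celery('sketch', broker='memory://', backend='cache+memory://')
    app.conf.task_always_eager = True
    app.conf.task_store_eager_result = True  # results really are stored

    @app.task
    def add(x, y):
        return x + y

    eager_result = add.delay(2, 2)               # runs inline, stores the result
    fetched = app.AsyncResult(eager_result.id)   # retrieval goes through the backend
    print(fetched.get(timeout=5))                # after this patch: no RuntimeWarning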
---
 celery/backends/base.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/celery/backends/base.py b/celery/backends/base.py
index 5f76191b136..281c5de0504 100644
--- a/celery/backends/base.py
+++ b/celery/backends/base.py
@@ -569,9 +569,10 @@ def get_children(self, task_id):
             pass

     def _ensure_not_eager(self):
-        if self.app.conf.task_always_eager:
+        if self.app.conf.task_always_eager and not self.app.conf.task_store_eager_result:
             warnings.warn(
-                "Shouldn't retrieve result with task_always_eager enabled.",
+                "Results are not stored in backend and should not be retrieved when "
+                "task_always_eager is enabled, unless task_store_eager_result is enabled.",
                 RuntimeWarning
             )

From 89db86304f57ec1cfbeffc0c2764b2cf97c83545 Mon Sep 17 00:00:00 2001
From: Omer Katz
Date: Wed, 22 Jun 2022 13:03:54 +0300
Subject: [PATCH 0142/1051] Reduce prefetch count on restart and gradually
 restore it (#7350)

* Reduce prefetch count on restart and gradually restore it.
* Rename.
* Ensure we always eventually increase the QoS.
* Ensure _maximum_prefetch_restored isn't racy.
* Set default num_processes to 2.
* Add unit test.
* Celery set prefetch count on restart (#7390)

  * Added test for create_task_handler
  * [pre-commit.ci] auto fixes from pre-commit.com hooks

    for more information, see https://pre-commit.ci
  * Changed consumer.py
  * Fixed celery consumer implementation style
  * Improved test_create_task_handler. Used task_message_from_sig instead of manual mocking

  Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
* Celery set prefetch count on restart (#7403)

  * Added test for create_task_handler
  * [pre-commit.ci] auto fixes from pre-commit.com hooks

    for more information, see https://pre-commit.ci
  * Changed consumer.py
  * Fixed celery consumer implementation style
  * Improved test_create_task_handler. Used task_message_from_sig instead of manual mocking
  * Fixed test_worker.py

  Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
* Add a log message to notify users we're reducing the prefetch count.
* Notify user when normal operations are resumed.
* Document broker reconnection behaviour
* Marked which feature was added in which version.

Co-authored-by: dobosevych
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Omer Katz
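
The reconnect arithmetic introduced below can be sketched in isolation (plain
Python mirroring the attribute names in the diff; the numbers are illustrative,
this is not Celery API):

    prefetch_multiplier = 4
    num_processes = 2
    max_prefetch_count = num_processes * prefetch_multiplier  # normal ceiling: 8

    active_requests = 3  # tasks that were mid-flight when the connection dropped

    # On reconnect: shrink the prefetch window, but never below one multiplier.
    initial_prefetch_count = max(
        prefetch_multiplier,
        max_prefetch_count - active_requests * prefetch_multiplier,
    )
    assert initial_prefetch_count == 4

    # Each time one of the old tasks is acked, the QoS value then grows by one
    # multiplier until it reaches max_prefetch_count again.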
---
 celery/worker/consumer/consumer.py | 66 +++++++++++++++++++++++++-
 docs/userguide/workers.rst         | 22 +++++++++
 examples/app/myapp.py              |  3 ++
 t/unit/worker/test_consumer.py     | 74 +++++++++++++++++++++++++++++-
 t/unit/worker/test_worker.py       |  1 +
 5 files changed, 163 insertions(+), 3 deletions(-)

diff --git a/celery/worker/consumer/consumer.py b/celery/worker/consumer/consumer.py
index f74326c506f..98ead56139a 100644
--- a/celery/worker/consumer/consumer.py
+++ b/celery/worker/consumer/consumer.py
@@ -199,6 +199,7 @@ def __init__(self, on_task_request,
         self.disable_rate_limits = disable_rate_limits
         self.initial_prefetch_count = initial_prefetch_count
         self.prefetch_multiplier = prefetch_multiplier
+        self._maximum_prefetch_restored = True

         # this contains a tokenbucket for each task type by name, used for
         # rate limits, or None if rate limits are disabled for that task.
@@ -380,6 +381,20 @@ def on_connection_error_after_connected(self, exc):
         else:
             warnings.warn(CANCEL_TASKS_BY_DEFAULT, CPendingDeprecationWarning)

+        self.initial_prefetch_count = max(
+            self.prefetch_multiplier,
+            self.max_prefetch_count - len(tuple(active_requests)) * self.prefetch_multiplier
+        )
+
+        self._maximum_prefetch_restored = self.initial_prefetch_count == self.max_prefetch_count
+        if not self._maximum_prefetch_restored:
+            logger.info(
+                f"Temporarily reducing the prefetch count to {self.initial_prefetch_count} to avoid over-fetching "
+                f"since {len(tuple(active_requests))} tasks are currently being processed.\n"
+                f"The prefetch count will be gradually restored to {self.max_prefetch_count} as the tasks "
+                "complete processing."
+            )
+
     def register_with_event_loop(self, hub):
         self.blueprint.send_all(
             self, 'register_with_event_loop', args=(hub,),
@@ -622,10 +637,31 @@ def on_task_received(message):
                 return on_unknown_task(None, message, exc)
         else:
             try:
+                ack_log_error_promise = promise(
+                    call_soon,
+                    (message.ack_log_error,),
+                    on_error=self._restore_prefetch_count_after_connection_restart,
+                )
+                reject_log_error_promise = promise(
+                    call_soon,
+                    (message.reject_log_error,),
+                    on_error=self._restore_prefetch_count_after_connection_restart,
+                )
+
+                if (
+                    not self._maximum_prefetch_restored
+                    and self.restart_count > 0
+                    and self._new_prefetch_count <= self.max_prefetch_count
+                ):
+                    ack_log_error_promise.then(self._restore_prefetch_count_after_connection_restart,
+                                               on_error=self._restore_prefetch_count_after_connection_restart)
+                    reject_log_error_promise.then(self._restore_prefetch_count_after_connection_restart,
+                                                  on_error=self._restore_prefetch_count_after_connection_restart)
+
                 strategy(
                     message, payload,
-                    promise(call_soon, (message.ack_log_error,)),
-                    promise(call_soon, (message.reject_log_error,)),
+                    ack_log_error_promise,
+                    reject_log_error_promise,
                     callbacks,
                 )
             except (InvalidTaskError, ContentDisallowed) as exc:
@@ -635,6 +671,32 @@ def on_task_received(message):

         return on_task_received

+    def _restore_prefetch_count_after_connection_restart(self, p, *args):
+        with self.qos._mutex:
+            if self._maximum_prefetch_restored:
+                return
+
+            new_prefetch_count = min(self.max_prefetch_count, self._new_prefetch_count)
+            self.qos.value = self.initial_prefetch_count = new_prefetch_count
+            self.qos.set(self.qos.value)
+
+            already_restored = self._maximum_prefetch_restored
+            self._maximum_prefetch_restored = new_prefetch_count == self.max_prefetch_count
+
+            if already_restored is False and self._maximum_prefetch_restored is True:
+                logger.info(
+                    "Resuming normal operations following a restart.\n"
+                    f"Prefetch count has been restored to the maximum of {self.max_prefetch_count}"
+                )
+
+    @property
+    def max_prefetch_count(self):
+        return self.pool.num_processes * self.prefetch_multiplier
+
+    @property
+    def _new_prefetch_count(self):
+        return self.qos.value + self.prefetch_multiplier
+
     def __repr__(self):
         """``repr(self)``."""
         return '<Consumer: {self.hostname} ({state})>'.format(
diff --git a/docs/userguide/workers.rst b/docs/userguide/workers.rst
index 1e51c915e67..9b8c2a4387d 100644
--- a/docs/userguide/workers.rst
+++ b/docs/userguide/workers.rst
@@ -137,6 +137,28 @@ isn't recommended in production:

 :sig:`HUP` is disabled on macOS because of a limitation on
 that platform.

+Automatic re-connection on connection loss to broker
+====================================================
+
+.. versionadded:: 5.3
+
+Unless :setting:`broker_connection_retry_on_startup` is set to False,
+Celery will automatically retry reconnecting to the broker after the first
+connection loss. :setting:`broker_connection_retry` controls whether to automatically
+retry reconnecting to the broker for subsequent reconnects.
+
+.. versionadded:: 5.1
+
+If :setting:`worker_cancel_long_running_tasks_on_connection_loss` is set to True,
+Celery will also cancel any long running task that is currently running.
+
+.. versionadded:: 5.3
+
+Since the message broker does not track how many tasks were already fetched before
+the connection was lost, Celery will reduce the prefetch count by the number of
+tasks that are currently running multiplied by :setting:`worker_prefetch_multiplier`.
+The prefetch count will be gradually restored to the maximum allowed after
+each time a task that was running before the connection was lost is complete.
+
 .. _worker-process-signals:

diff --git a/examples/app/myapp.py b/examples/app/myapp.py
index 7ee8727095a..532b677fd84 100644
--- a/examples/app/myapp.py
+++ b/examples/app/myapp.py
@@ -22,6 +22,7 @@
     $ celery -A myapp:app worker -l INFO

 """
+from time import sleep

 from celery import Celery

@@ -30,11 +31,13 @@
     broker='amqp://guest@localhost//',
     # ## add result backend here if needed.
     # backend='rpc'
+    task_acks_late=True
 )


 @app.task
 def add(x, y):
+    sleep(10)
     return x + y

diff --git a/t/unit/worker/test_consumer.py b/t/unit/worker/test_consumer.py
index 86619c1113b..7865cc3ac77 100644
--- a/t/unit/worker/test_consumer.py
+++ b/t/unit/worker/test_consumer.py
@@ -1,7 +1,7 @@
 import errno
 import socket
 from collections import deque
-from unittest.mock import Mock, call, patch
+from unittest.mock import MagicMock, Mock, call, patch

 import pytest
 from billiard.exceptions import RestartFreqExceeded
@@ -32,6 +32,7 @@ def get_consumer(self, no_hub=False, **kwargs):
             **kwargs
         )
         consumer.blueprint = Mock(name='blueprint')
+        consumer.pool.num_processes = 2
         consumer._restart_state = Mock(name='_restart_state')
         consumer.connection = _amqp_connection()
         consumer.connection_errors = (socket.error, OSError,)
@@ -40,6 +41,11 @@


 class test_Consumer(ConsumerTestCase):
+    def setup(self):
+        @self.app.task(shared=False)
+        def add(x, y):
+            return x + y
+        self.add = add

     def test_repr(self):
         assert repr(self.get_consumer())
@@ -84,6 +90,72 @@ def test_update_prefetch_count(self):
         c._update_qos_eventually.assert_called_with(8)
         assert c.initial_prefetch_count == 10 * 10

+    @pytest.mark.parametrize(
+        'active_requests_count,expected_initial,expected_maximum',
+        [
+            [0, 2, True],
+            [1, 1, False],
+            [2, 1, False]
+        ]
+    )
+    @patch('celery.worker.consumer.consumer.active_requests', new_callable=set)
+    def test_restore_prefetch_count_on_restart(self, active_requests_mock, active_requests_count,
+                                               expected_initial, expected_maximum, subtests):
+        reqs = {Mock() for _ in range(active_requests_count)}
+        active_requests_mock.update(reqs)
+
+        c = self.get_consumer()
+        c.qos = Mock()
+        c.blueprint = Mock()
+
+        def bp_start(*_, **__):
+            if c.restart_count > 1:
+                c.blueprint.state = CLOSE
+            else:
+                raise ConnectionError
+
+        c.blueprint.start.side_effect = bp_start
+
+        c.start()
+
+        with subtests.test("initial prefetch count is never 0"):
+            assert c.initial_prefetch_count != 0
+
+        with subtests.test(f"initial prefetch count is equal to {expected_initial}"):
+            assert c.initial_prefetch_count == expected_initial
+
+        with subtests.test("maximum prefetch is reached"):
+            assert c._maximum_prefetch_restored is expected_maximum
+
+    def test_create_task_handler(self, subtests):
+        c = self.get_consumer()
+        c.qos = MagicMock()
+        c.qos.value = 1
+        c._maximum_prefetch_restored = False
+
+        sig = self.add.s(2, 2)
+        message = self.task_message_from_sig(self.app, sig)
+
+        def raise_exception():
+            raise KeyError('Foo')
+
+        def strategy(_, __, ack_log_error_promise, ___, ____):
+            ack_log_error_promise()
+
+        c.strategies[sig.task] = strategy
+        c.call_soon = raise_exception
+        on_task_received = c.create_task_handler()
+        on_task_received(message)
+
+        with subtests.test("initial prefetch count is never 0"):
+            assert c.initial_prefetch_count != 0
+
+        with subtests.test("initial prefetch count is 2"):
+            assert c.initial_prefetch_count == 2
+
+        with subtests.test("maximum prefetch is reached"):
+            assert c._maximum_prefetch_restored is True
+
     def test_flush_events(self):
         c = self.get_consumer()
         c.event_dispatcher = None
diff --git a/t/unit/worker/test_worker.py b/t/unit/worker/test_worker.py
index 93589fdbf5a..6bf2a14a1d6 100644
--- a/t/unit/worker/test_worker.py
+++ b/t/unit/worker/test_worker.py
@@ -293,6 +293,7 @@ def loop_side_effect():
             yield SyntaxError('bar')
         c = self.NoopConsumer(task_events=False, pool=BasePool())
         c.loop.side_effect = loop_side_effect()
+        c.pool.num_processes = 2
         c.connection_errors = (KeyError,)
         try:
             with pytest.raises(SyntaxError):

From 871bb21493fff41f49e108a9c3e997144375e0bb Mon Sep 17 00:00:00 2001
From: David Pravec
Date: Mon, 20 Jun 2022 18:10:19 +0200
Subject: [PATCH 0143/1051] Update CONTRIBUTORS.txt

My biggest contribution was fixing a LimitedSet problem back in 2016,
commit a320837d6e9a316daf86700f4b27798009e2dd7f

---
 CONTRIBUTORS.txt | 1 +
 1 file changed, 1 insertion(+)

diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt
index d847bb2492d..9eb5ec50180 100644
--- a/CONTRIBUTORS.txt
+++ b/CONTRIBUTORS.txt
@@ -207,6 +207,7 @@ Mike Attwood, 2016/01/22
 David Harrigan, 2016/02/01
 Ahmet Demir, 2016/02/27
 Maxime Verger, 2016/02/29
+David Pravec, 2016/03/11
 Alexander Oblovatniy, 2016/03/10
 Komu Wairagu, 2016/04/03
 Joe Sanford, 2016/04/11

From 4627b9364891af55c72e509eb1b7630114b1bb82 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C5=81ukasz=20Wieczorek?=
Date: Thu, 23 Jun 2022 09:57:57 +0200
Subject: [PATCH 0144/1051] Modify example debug_task to ignore result

To allow:
```
debug_task.delay().get()
```

---
 examples/django/proj/celery.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/examples/django/proj/celery.py b/examples/django/proj/celery.py
index 9766a2ac2ee..ec3354dcdf3 100644
--- a/examples/django/proj/celery.py
+++ b/examples/django/proj/celery.py
@@ -17,6 +17,6 @@
 app.autodiscover_tasks()


-@app.task(bind=True)
+@app.task(bind=True, ignore_result=True)
 def debug_task(self):
     print(f'Request: {self.request!r}')

From 59263b0409e3f02dc16ca8a3bd1e42b5a3eba36d Mon Sep 17 00:00:00 2001
From: Gabriel Soldani <1268700+gabrielsoldani@users.noreply.github.com>
Date: Sun, 26 Jun 2022 06:59:44 -0300
Subject: [PATCH 0145/1051] Minor refactors, found by static analysis (#7587)

* Remove deprecated methods in `celery.local.Proxy`

* Collapse conditionals for readability

* Remove unused parameter `uuid`

* Remove unused import `ClusterOptions`

* Remove dangerous mutable default argument

Continues work from #5478

* Remove always `None` and unused global variable

* Remove unreachable `elif` block

* Consolidate import statements

* Add missing parameter to `os._exit()`

* Add missing assert statement

* Remove unused global `WindowsError`

* Use `mkstemp` instead of deprecated `mktemp`

* No need for `for..else` constructs in loops that don't break

In these cases where the loop returns or raises instead of breaking, it
is simpler to just put the code that runs after the loop completes right
after the loop instead.

* Use the previously unused parameter `compat_modules`

Previously this parameter was always overwritten by the value of
`COMPAT_MODULES.get(name, ())`, which was very likely unintentional.

* Remove unused local variable `tz`

* Make `assert_received` actually check for `is_received`

Previously, it called `is_accepted`, which was likely a copy-paste
mistake from the `assert_accepted` method.

* Use previously unused `args` and `kwargs` params

Unlike other backends' `__reduce__` methods, the one from
`RedisBackend` simply overwrites `args` and `kwargs` instead of adding
to them. This change makes it more in line with other backends.

* Update celery/backends/filesystem.py

Co-authored-by: Gabriel Soldani <1268700+gabrielsoldani@users.noreply.github.com>
Co-authored-by: Asif Saif Uddin
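
One bullet above deserves a concrete illustration, since the pattern recurs: a
mutable default is evaluated once, at definition time, and then shared by every
call that omits the argument. A standalone sketch (not code from this patch):

    def register(event, handlers={}):  # anti-pattern: one shared dict
        handlers.setdefault(event, []).append('handler')
        return handlers

    assert register('a') is register('b')  # state leaks between unrelated calls

    def register_fixed(event, handlers=None):  # the shape of the fix applied below
        handlers = {} if handlers is None else handlers
        handlers.setdefault(event, []).append('handler')
        return handlers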
---
 celery/app/log.py                 |  5 ++---
 celery/app/trace.py               | 10 +++++-----
 celery/backends/couchbase.py      |  4 ++--
 celery/backends/filesystem.py     |  7 +++----
 celery/backends/redis.py          |  3 +--
 celery/bin/amqp.py                |  2 +-
 celery/canvas.py                  |  7 +++----
 celery/contrib/rdb.py             |  3 +--
 celery/contrib/testing/manager.py |  2 +-
 celery/events/cursesmon.py        |  2 --
 celery/local.py                   | 12 +-----------
 celery/schedules.py               |  2 --
 celery/utils/imports.py           |  7 +++----
 t/benchmarks/bench_worker.py      |  2 +-
 t/integration/test_inspect.py     |  2 +-
 t/unit/app/test_log.py            |  4 ++--
 t/unit/conftest.py                |  6 ------
 17 files changed, 27 insertions(+), 53 deletions(-)

diff --git a/celery/app/log.py b/celery/app/log.py
index 6e03722b8a7..a4db1057791 100644
--- a/celery/app/log.py
+++ b/celery/app/log.py
@@ -64,9 +64,8 @@ def setup(self, loglevel=None, logfile=None, redirect_stdouts=False,
         handled = self.setup_logging_subsystem(
             loglevel, logfile, colorize=colorize, hostname=hostname,
         )
-        if not handled:
-            if redirect_stdouts:
-                self.redirect_stdouts(redirect_level)
+        if not handled and redirect_stdouts:
+            self.redirect_stdouts(redirect_level)
         os.environ.update(
             CELERY_LOG_LEVEL=str(loglevel) if loglevel else '',
             CELERY_LOG_FILE=str(logfile) if logfile else '',
diff --git a/celery/app/trace.py b/celery/app/trace.py
index 778c4bb1994..5307620d342 100644
--- a/celery/app/trace.py
+++ b/celery/app/trace.py
@@ -369,7 +369,7 @@ def build_tracer(name, task, loader=None, hostname=None, store_errors=True,
     from celery import canvas
     signature = canvas.maybe_signature  # maybe_ does not clone if already

-    def on_error(request, exc, uuid, state=FAILURE, call_errbacks=True):
+    def on_error(request, exc, state=FAILURE, call_errbacks=True):
         if propagate:
             raise
         I = Info(state, exc)
@@ -459,10 +459,10 @@ def trace_task(uuid, args, kwargs, request=None):
                     traceback_clear(exc)
                 except Retry as exc:
                     I, R, state, retval = on_error(
-                        task_request, exc, uuid, RETRY, call_errbacks=False)
+                        task_request, exc, RETRY, call_errbacks=False)
                     traceback_clear(exc)
                 except Exception as exc:
-                    I, R, state, retval = on_error(task_request, exc, uuid)
+                    I, R, state, retval = on_error(task_request, exc)
                     traceback_clear(exc)
                 except BaseException:
                     raise
@@ -516,7 +516,7 @@ def trace_task(uuid, args, kwargs, request=None):
                             uuid, retval, task_request, publish_result,
                         )
                 except EncodeError as exc:
-                    I, R, state, retval = on_error(task_request, exc, uuid)
+                    I, R, state, retval = on_error(task_request, exc)
                 else:
                     Rstr = saferepr(R, resultrepr_maxsize)
                     T = monotonic() - time_start
@@ -566,7 +566,7 @@ def trace_task(uuid, args, kwargs, request=None):
             raise
         R = report_internal_error(task, exc)
         if task_request is not None:
-            I, _, _, _ = on_error(task_request, exc, uuid)
+            I, _, _, _ = on_error(task_request, exc)
         return trace_ok_t(R, I, T, Rstr)

     return trace_task
diff --git a/celery/backends/couchbase.py b/celery/backends/couchbase.py
index 25f729f1961..f01cb958ad4 100644
--- a/celery/backends/couchbase.py
+++ b/celery/backends/couchbase.py
@@ -8,9 +8,9 @@

 try:
     from couchbase.auth import PasswordAuthenticator
-    from couchbase.cluster import Cluster, ClusterOptions
+    from couchbase.cluster import Cluster
 except ImportError:
-    Cluster = PasswordAuthenticator = ClusterOptions = None
+    Cluster = PasswordAuthenticator = None

 try:
     from couchbase_core._libcouchbase import FMT_AUTO
diff --git a/celery/backends/filesystem.py b/celery/backends/filesystem.py
index 6bc6bb141d0..22fd5dcfaad 100644
--- a/celery/backends/filesystem.py
+++ b/celery/backends/filesystem.py
@@ -53,10 +53,9 @@ def __init__(self, url=None, open=open, unlink=os.unlink, sep=os.sep,
         # Lets verify that we've everything setup right
         self._do_directory_test(b'.fs-backend-' + uuid().encode(encoding))

-    def __reduce__(self, args=(), kwargs={}):
-        kwargs.update(
-            dict(url=self.url))
-        return super().__reduce__(args, kwargs)
+    def __reduce__(self, args=(), kwargs=None):
+        kwargs = {} if not kwargs else kwargs
+        return super().__reduce__(args, {**kwargs, 'url': self.url})

     def _find_path(self, url):
         if not url:
diff --git a/celery/backends/redis.py b/celery/backends/redis.py
index 056f2c0eff9..97e186ec7f7 100644
--- a/celery/backends/redis.py
+++ b/celery/backends/redis.py
@@ -576,8 +576,7 @@ def client(self):
     def __reduce__(self, args=(), kwargs=None):
         kwargs = {} if not kwargs else kwargs
         return super().__reduce__(
-            (self.url,), {'expires': self.expires},
-        )
+            args, dict(kwargs, expires=self.expires, url=self.url))


 if getattr(redis, "sentinel", None):
diff --git a/celery/bin/amqp.py b/celery/bin/amqp.py
index d94c91607bd..b42b1dae813 100644
--- a/celery/bin/amqp.py
+++ b/celery/bin/amqp.py
@@ -309,4 +309,4 @@ def basic_ack(amqp_context, delivery_tag):
     amqp_context.echo_ok()


-repl = register_repl(amqp)
+register_repl(amqp)
diff --git a/celery/canvas.py b/celery/canvas.py
index 3d92a4e0f55..9d3ad8ca246 100644
--- a/celery/canvas.py
+++ b/celery/canvas.py
@@ -1469,10 +1469,9 @@ def _descend(cls, sig_obj):
                 child_size = cls._descend(child_sig)
                 if child_size > 0:
                     return child_size
-            else:
-                # We have to just hope this chain is part of some encapsulating
-                # signature which is valid and can fire the chord body
-                return 0
+            # We have to just hope this chain is part of some encapsulating
+            # signature which is valid and can fire the chord body
+            return 0
         elif isinstance(sig_obj, chord):
             # The child chord's body counts toward this chord
             return cls._descend(sig_obj.body)
diff --git a/celery/contrib/rdb.py b/celery/contrib/rdb.py
index a34c0b52678..8ac8f70134e 100644
--- a/celery/contrib/rdb.py
+++ b/celery/contrib/rdb.py
@@ -132,8 +132,7 @@ def get_avail_port(self, host, port, search_limit=100, skew=+0):
                     raise
             else:
                 return _sock, this_port
-        else:
-            raise Exception(NO_AVAILABLE_PORT.format(self=self))
+        raise Exception(NO_AVAILABLE_PORT.format(self=self))

     def say(self, m):
         print(m, file=self.out)
diff --git a/celery/contrib/testing/manager.py b/celery/contrib/testing/manager.py
index 5c5c3e7797c..69b7e287615 100644
--- a/celery/contrib/testing/manager.py
+++ b/celery/contrib/testing/manager.py
@@ -153,7 +153,7 @@ def assert_accepted(self, ids, interval=0.5,
     def assert_received(self, ids, interval=0.5,
                         desc='waiting for tasks to be received', **policy):
         return self.assert_task_worker_state(
-            self.is_accepted, ids, interval=interval, desc=desc, **policy
+            self.is_received, ids, interval=interval, desc=desc, **policy
         )

     def assert_result_tasks_in_progress_or_completed(
diff --git a/celery/events/cursesmon.py b/celery/events/cursesmon.py
index 677c5e7556a..cff26befb36 100644
--- a/celery/events/cursesmon.py
+++ b/celery/events/cursesmon.py
@@ -273,8 +273,6 @@ def alert_callback(mx, my, xs):
             nexty = next(y)
             if nexty >= my - 1:
                 subline = ' ' * 4 + '[...]'
-            elif nexty >= my:
-                break
             self.win.addstr(
                 nexty, 3,
                 abbr(' ' * 4 + subline, self.screen_width - 4),
diff --git a/celery/local.py b/celery/local.py
index 6eed19194dd..c2dd8444ed9 100644
--- a/celery/local.py
+++ b/celery/local.py
@@ -148,12 +148,6 @@ def __setitem__(self, key, value):
     def __delitem__(self, key):
         del self._get_current_object()[key]

-    def __setslice__(self, i, j, seq):
-        self._get_current_object()[i:j] = seq
-
-    def __delslice__(self, i, j):
-        del self._get_current_object()[i:j]
-
     def __setattr__(self, name, value):
         setattr(self._get_current_object(), name, value)

@@ -199,9 +193,6 @@ def __iter__(self):
     def __contains__(self, i):
         return i in self._get_current_object()

-    def __getslice__(self, i, j):
-        return self._get_current_object()[i:j]
-
     def __add__(self, other):
         return self._get_current_object() + other

@@ -506,12 +497,11 @@ def create_module(name, attrs, cls_attrs=None, pkg=None,

 def recreate_module(name, compat_modules=None, by_module=None, direct=None,
                     base=LazyModule, **attrs):
-    compat_modules = compat_modules or ()
+    compat_modules = compat_modules or COMPAT_MODULES.get(name, ())
     by_module = by_module or {}
     direct = direct or {}
     old_module = sys.modules[name]
     origins = get_origins(by_module)
-    compat_modules = COMPAT_MODULES.get(name, ())

     _all = tuple(set(reduce(
         operator.add,
diff --git a/celery/schedules.py b/celery/schedules.py
index 0daa8b67300..ac571fe9d3e 100644
--- a/celery/schedules.py
+++ b/celery/schedules.py
@@ -539,9 +539,7 @@ def __setstate__(self, state):
         super().__init__(**state)

     def remaining_delta(self, last_run_at, tz=None, ffwd=ffwd):
-        # pylint: disable=redefined-outer-name
         # caching global ffwd
-        tz = tz or self.tz
         last_run_at = self.maybe_make_aware(last_run_at)
         now = self.maybe_make_aware(self.now())
         dow_num = last_run_at.isoweekday() % 7  # Sunday is day 0, not day 7
diff --git a/celery/utils/imports.py b/celery/utils/imports.py
index 6fcdf2e0e17..60f11e8316f 100644
--- a/celery/utils/imports.py
+++ b/celery/utils/imports.py
@@ -1,10 +1,9 @@
 """Utilities related to importing modules and symbols by name."""
-import importlib
 import os
 import sys
 import warnings
 from contextlib import contextmanager
-from importlib import reload
+from importlib import import_module, reload

 try:
     from importlib.metadata import entry_points
@@ -69,7 +68,7 @@ def cwd_in_path():
 def find_module(module, path=None, imp=None):
     """Version of :func:`imp.find_module` supporting dots."""
     if imp is None:
-        imp = importlib.import_module
+        imp = import_module
     with cwd_in_path():
         try:
             return imp(module)
@@ -100,7 +99,7 @@ def import_from_cwd(module, imp=None, package=None):
     precedence over modules located in `sys.path`.
     """
     if imp is None:
-        imp = importlib.import_module
+        imp = import_module
     with cwd_in_path():
         return imp(module, package=package)
diff --git a/t/benchmarks/bench_worker.py b/t/benchmarks/bench_worker.py
index 5c9f6f46ba3..55503716d51 100644
--- a/t/benchmarks/bench_worker.py
+++ b/t/benchmarks/bench_worker.py
@@ -60,7 +60,7 @@ def it(_, n):
             n, total, n / (total + .0),
         ))
         import os
-        os._exit()
+        os._exit(0)
     it.cur += 1
diff --git a/t/integration/test_inspect.py b/t/integration/test_inspect.py
index 60332f0071d..35b9fead9e1 100644
--- a/t/integration/test_inspect.py
+++ b/t/integration/test_inspect.py
@@ -51,7 +51,7 @@ def test_registered(self, inspect):
         # TODO: We can check also the exact values of the registered methods
         ret = inspect.registered()
         assert len(ret) == 1
-        len(ret[NODENAME]) > 0
+        assert len(ret[NODENAME]) > 0
         for task_name in ret[NODENAME]:
             assert isinstance(task_name, str)
diff --git a/t/unit/app/test_log.py b/t/unit/app/test_log.py
index 60b46b5ee31..c3a425447a3 100644
--- a/t/unit/app/test_log.py
+++ b/t/unit/app/test_log.py
@@ -2,7 +2,7 @@
 import sys
 from collections import defaultdict
 from io import StringIO
-from tempfile import mktemp
+from tempfile import mkstemp
 from unittest.mock import Mock, patch

 import pytest
@@ -210,7 +210,7 @@ def test_setup_logger_no_handlers_stream(self, restore_logging):

     @patch('os.fstat')
     def test_setup_logger_no_handlers_file(self, *args):
-        tempfile = mktemp(suffix='unittest', prefix='celery')
+        _, tempfile = mkstemp(suffix='unittest', prefix='celery')
         with patch('builtins.open') as osopen:
             with conftest.restore_logging_context_manager():
                 files = defaultdict(StringIO)
diff --git a/t/unit/conftest.py b/t/unit/conftest.py
index ecd843a4c44..9b0b46921d0 100644
--- a/t/unit/conftest.py
+++ b/t/unit/conftest.py
@@ -30,12 +30,6 @@
     'celery_parameters'
 )

-try:
-    WindowsError = WindowsError
-except NameError:
-
-    class WindowsError(Exception):
-        pass

 PYPY3 = getattr(sys, 'pypy_version_info', None) and sys.version_info[0] > 3

From d04c5ddd9523dc927ebc1d96a59f096b0e06efb6 Mon Sep 17 00:00:00 2001
From: EricAtORS
Date: Wed, 29 Jun 2022 02:12:24 -0700
Subject: [PATCH 0146/1051] Improve workflow primitive subclassing (#7593)

* let group, chain and chunks create the subclasses when recreating from dict

* add tests for subclassing workflow primitives

* force the creation of the proper chain type when subclassing chain

Co-authored-by: Eric Yen
---
 celery/canvas.py            |  20 +++--
 t/unit/tasks/test_canvas.py | 157 +++++++++++++++++++++++++-----------
 2 files changed, 124 insertions(+), 53 deletions(-)

diff --git a/celery/canvas.py b/celery/canvas.py
index 9d3ad8ca246..7f7dffd6f80 100644
--- a/celery/canvas.py
+++ b/celery/canvas.py
@@ -558,7 +558,7 @@ def from_dict(cls, d, app=None):
         if isinstance(tasks, tuple):  # aaaargh
             tasks = d['kwargs']['tasks'] = list(tasks)
         tasks = [maybe_signature(task, app=app) for task in tasks]
-        return _chain(tasks, app=app, **d['options'])
+        return cls(tasks, app=app, **d['options'])

     def __init__(self, *tasks, **options):
         tasks = (regen(tasks[0]) if len(tasks) == 1 and is_list(tasks[0])
@@ -582,11 +582,13 @@ def __or__(self, other):
             if not tasks:
                 # If the chain is empty, return the group
                 return other
-            return _chain(seq_concat_item(
+            # use type(self) for _chain subclasses
+            return type(self)(seq_concat_item(
                 tasks, other), app=self._app)
         elif isinstance(other, _chain):
             # chain | chain -> chain
-            return _chain(seq_concat_seq(
+            # use type(self) for _chain subclasses
+            return type(self)(seq_concat_seq(
                 self.unchain_tasks(), other.unchain_tasks()), app=self._app)
         elif isinstance(other, Signature):
             if self.tasks and isinstance(self.tasks[-1], group):
@@ -602,7 +604,8 @@ def __or__(self, other):
                 return sig
             else:
                 # chain | task -> chain
-                return _chain(seq_concat_item(
+                # use type(self) for _chain subclasses
+                return type(self)(seq_concat_item(
                     self.unchain_tasks(), other), app=self._app)
         else:
             return NotImplemented
@@ -894,7 +897,10 @@ def __new__(cls, *tasks, **kwargs):
             tasks = tasks[0] if len(tasks) == 1 else tasks
             # if is_list(tasks) and len(tasks) == 1:
             #     return super(chain, cls).__new__(cls, tasks, **kwargs)
-            return reduce(operator.or_, tasks, chain())
+            new_instance = reduce(operator.or_, tasks, _chain())
+            if cls != chain and isinstance(new_instance, _chain) and not isinstance(new_instance, cls):
+                return super().__new__(cls, new_instance.tasks, **kwargs)
+            return new_instance
         return super().__new__(cls, *tasks, **kwargs)

@@ -957,7 +963,7 @@ class chunks(Signature):

     @classmethod
     def from_dict(cls, d, app=None):
-        return chunks(*cls._unpack_args(d['kwargs']), app=app, **d['options'])
+        return cls(*cls._unpack_args(d['kwargs']), app=app, **d['options'])

     def __init__(self, task, it, n, **options):
         super().__init__('celery.chunks', (),
@@ -1047,7 +1053,7 @@ def from_dict(cls, d, app=None):
         d["kwargs"]["tasks"] = rebuilt_tasks = type(orig_tasks)(
             maybe_signature(task, app=app) for task in orig_tasks
         )
-        return group(rebuilt_tasks, app=app, **d['options'])
+        return cls(rebuilt_tasks, app=app, **d['options'])

     def __init__(self, *tasks, **options):
         if len(tasks) == 1:
diff --git a/t/unit/tasks/test_canvas.py b/t/unit/tasks/test_canvas.py
index eefdef8797b..f673159954b 100644
--- a/t/unit/tasks/test_canvas.py
+++ b/t/unit/tasks/test_canvas.py
@@ -49,6 +49,34 @@ def div(x, y):
         self.div = div


+@Signature.register_type()
+class chord_subclass(chord):
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.subtask_type = "chord_subclass"
+
+
+@Signature.register_type()
+class group_subclass(group):
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.subtask_type = "group_subclass"
+
+
+@Signature.register_type()
+class chain_subclass(chain):
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.subtask_type = "chain_subclass"
+
+
+@Signature.register_type()
+class chunks_subclass(chunks):
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.subtask_type = "chunks_subclass"
+
+
 class test_Signature(CanvasCase):

     def test_getitem_property_class(self):
@@ -268,6 +296,13 @@ def test_apply(self):

 class test_chunks(CanvasCase):

+    def test_chunks_preserves_state(self):
+        x = self.add.chunks(range(100), 10)
+        d = dict(x)
+        d['subtask_type'] = "chunks_subclass"
+        isinstance(chunks_subclass.from_dict(d), chunks_subclass)
+        isinstance(chunks_subclass.from_dict(d).clone(), chunks_subclass)
+
     def test_chunks(self):
         x = self.add.chunks(range(100), 10)
         assert dict(chunks.from_dict(dict(x), app=self.app)) == dict(x)
@@ -294,11 +329,13 @@ def test_chain_of_chain_with_a_single_task(self):
         s = self.add.s(1, 1)
         assert chain([chain(s)]).tasks == list(chain(s).tasks)

-    def test_clone_preserves_state(self):
-        x = chain(self.add.s(i, i) for i in range(10))
+    @pytest.mark.parametrize("chain_type", (_chain, chain_subclass))
+    def test_clone_preserves_state(self, chain_type):
+        x = chain_type(self.add.s(i, i) for i in range(10))
         assert x.clone().tasks == x.tasks
         assert x.clone().kwargs == x.kwargs
         assert x.clone().args == x.args
+        assert isinstance(x.clone(), chain_type)

     def test_repr(self):
         x = self.add.s(2, 2) | self.add.s(2)
@@ -311,24 +348,30 @@ def test_apply_async(self):
         assert result.parent.parent
         assert result.parent.parent.parent is None

-    def test_splices_chains(self):
-        c = chain(
+    @pytest.mark.parametrize("chain_type", (_chain, chain_subclass))
+    def test_splices_chains(self, chain_type):
+        c = chain_type(
             self.add.s(5, 5),
-            chain(self.add.s(6), self.add.s(7), self.add.s(8), app=self.app),
+            chain_type(self.add.s(6), self.add.s(7), self.add.s(8), app=self.app),
             app=self.app,
         )
         c.freeze()
         tasks, _ = c._frozen
         assert len(tasks) == 4
+        assert isinstance(c, chain_type)

-    def test_from_dict_no_tasks(self):
-        assert chain.from_dict(dict(chain(app=self.app)), app=self.app)
+    @pytest.mark.parametrize("chain_type", [_chain, chain_subclass])
+    def test_from_dict_no_tasks(self, chain_type):
+        assert chain_type.from_dict(dict(chain_type(app=self.app)), app=self.app)
+        assert isinstance(chain_type.from_dict(dict(chain_type(app=self.app)), app=self.app), chain_type)

-    def test_from_dict_full_subtasks(self):
-        c = chain(self.add.si(1, 2), self.add.si(3, 4), self.add.si(5, 6))
+    @pytest.mark.parametrize("chain_type", [_chain, chain_subclass])
+    def test_from_dict_full_subtasks(self, chain_type):
+        c = chain_type(self.add.si(1, 2), self.add.si(3, 4), self.add.si(5, 6))
         serialized = json.loads(json.dumps(c))
-        deserialized = chain.from_dict(serialized)
+        deserialized = chain_type.from_dict(serialized)
         assert all(isinstance(task, Signature) for task in deserialized.tasks)
+        assert isinstance(deserialized, chain_type)

     @pytest.mark.usefixtures('depends_on_current_app')
     def test_app_falls_back_to_default(self):
@@ -628,6 +671,11 @@ def test_reverse(self):
         assert isinstance(signature(x), group)
         assert isinstance(signature(dict(x)), group)

+    def test_reverse_with_subclass(self):
+        x = group_subclass([self.add.s(2, 2), self.add.s(4, 4)])
+        assert isinstance(signature(x), group_subclass)
+        assert isinstance(signature(dict(x)), group_subclass)
+
     def test_cannot_link_on_group(self):
         x = group([self.add.s(2, 2), self.add.s(4, 4)])
         with pytest.raises(TypeError):
@@ -711,29 +759,36 @@ def test_apply_async_with_parent(self):
         finally:
             _task_stack.pop()

-    def test_from_dict(self):
-        x = group([self.add.s(2, 2), self.add.s(4, 4)])
+    @pytest.mark.parametrize("group_type", (group, group_subclass))
+    def test_from_dict(self, group_type):
+        x = group_type([self.add.s(2, 2), self.add.s(4, 4)])
         x['args'] = (2, 2)
-        assert group.from_dict(dict(x))
+        value = group_type.from_dict(dict(x))
+        assert value and isinstance(value, group_type)
         x['args'] = None
-        assert group.from_dict(dict(x))
+        value = group_type.from_dict(dict(x))
+        assert value and isinstance(value, group_type)

-    def test_from_dict_deep_deserialize(self):
-        original_group = group([self.add.s(1, 2)] * 42)
+    @pytest.mark.parametrize("group_type", (group, group_subclass))
+    def test_from_dict_deep_deserialize(self, group_type):
+        original_group = group_type([self.add.s(1, 2)] * 42)
         serialized_group = json.loads(json.dumps(original_group))
-        deserialized_group = group.from_dict(serialized_group)
+        deserialized_group = group_type.from_dict(serialized_group)
+        assert isinstance(deserialized_group, group_type)
         assert all(
             isinstance(child_task, Signature)
             for child_task in deserialized_group.tasks
         )

-    def test_from_dict_deeper_deserialize(self):
-        inner_group = group([self.add.s(1, 2)] * 42)
-        outer_group = group([inner_group] * 42)
+    @pytest.mark.parametrize("group_type", (group, group_subclass))
+    def test_from_dict_deeper_deserialize(self, group_type):
+        inner_group = group_type([self.add.s(1, 2)] * 42)
+        outer_group = group_type([inner_group] * 42)
         serialized_group = json.loads(json.dumps(outer_group))
-        deserialized_group = group.from_dict(serialized_group)
+        deserialized_group = group_type.from_dict(serialized_group)
+        assert isinstance(deserialized_group, group_type)
         assert all(
-            isinstance(child_task, Signature)
+            isinstance(child_task, group_type)
             for child_task in deserialized_group.tasks
         )
         assert all(
@@ -1012,10 +1067,11 @@ def build_generator():
         # Access it again to make sure the generator is not further evaluated
         c.app

-    def test_reverse(self):
-        x = chord([self.add.s(2, 2), self.add.s(4, 4)], body=self.mul.s(4))
-        assert isinstance(signature(x), chord)
-        assert isinstance(signature(dict(x)), chord)
+    @pytest.mark.parametrize("chord_type", [chord, chord_subclass])
+    def test_reverse(self, chord_type):
+        x = chord_type([self.add.s(2, 2), self.add.s(4, 4)], body=self.mul.s(4))
+        assert isinstance(signature(x), chord_type)
+        assert isinstance(signature(dict(x)), chord_type)

     def test_clone_clones_body(self):
         x = chord([self.add.s(2, 2), self.add.s(4, 4)], body=self.mul.s(4))
@@ -1252,15 +1308,18 @@ def test_repr(self):
         x.kwargs['body'] = None
         assert 'without body' in repr(x)

-    def test_freeze_tasks_body_is_group(self, subtests):
+    @pytest.mark.parametrize("group_type", [group, group_subclass])
+    def test_freeze_tasks_body_is_group(self, subtests, group_type):
         # Confirm that `group index` values counting up from 0 are set for
         # elements of a chord's body when the chord is encapsulated in a group
         body_elem = self.add.s()
-        chord_body = group([body_elem] * 42)
+        chord_body = group_type([body_elem] * 42)
         chord_obj = chord(self.add.s(), body=chord_body)
-        top_group = group([chord_obj])
+        top_group = group_type([chord_obj])
+
         # We expect the body to be the signature we passed in before we freeze
-        with subtests.test(msg="Validate body tasks are retained"):
+        with subtests.test(msg="Validate body type and tasks are retained"):
+            assert isinstance(chord_obj.body, group_type)
             assert all(
                 embedded_body_elem is body_elem
                 for embedded_body_elem in chord_obj.body.tasks
@@ -1273,6 +1332,8 @@ def test_freeze_tasks_body_is_group(self, subtests):
         with subtests.test(
             msg="Validate body group indices count from 0 after freezing"
         ):
+            assert isinstance(chord_obj.body, group_type)
+
             assert all(
                 embedded_body_elem is not body_elem
                 for embedded_body_elem in chord_obj.body.tasks
@@ -1310,17 +1371,19 @@ def chord_add():
             _state.task_join_will_block = fixture_task_join_will_block
             result.task_join_will_block = fixture_task_join_will_block

-    def test_from_dict(self):
+    @pytest.mark.parametrize("chord_type", [chord, chord_subclass])
+    def test_from_dict(self, chord_type):
         header = self.add.s(1, 2)
-        original_chord = chord(header=header)
-        rebuilt_chord = chord.from_dict(dict(original_chord))
-        assert isinstance(rebuilt_chord, chord)
+        original_chord = chord_type(header=header)
+        rebuilt_chord = chord_type.from_dict(dict(original_chord))
+        assert isinstance(rebuilt_chord, chord_type)

-    def test_from_dict_with_body(self):
+    @pytest.mark.parametrize("chord_type", [chord, chord_subclass])
+    def test_from_dict_with_body(self, chord_type):
         header = body = self.add.s(1, 2)
-        original_chord = chord(header=header, body=body)
-        rebuilt_chord = chord.from_dict(dict(original_chord))
-        assert isinstance(rebuilt_chord, chord)
+        original_chord = chord_type(header=header, body=body)
+        rebuilt_chord = chord_type.from_dict(dict(original_chord))
+        assert isinstance(rebuilt_chord, chord_type)

     def test_from_dict_deep_deserialize(self, subtests):
         header = body = self.add.s(1, 2)
@@ -1337,8 +1400,9 @@ def test_from_dict_deep_deserialize(self, subtests):
         with subtests.test(msg="Verify chord body is deserialized"):
             assert isinstance(deserialized_chord.body, Signature)

-    def test_from_dict_deep_deserialize_group(self, subtests):
-        header = body = group([self.add.s(1, 2)] * 42)
+    @pytest.mark.parametrize("group_type", [group, group_subclass])
+    def test_from_dict_deep_deserialize_group(self, subtests, group_type):
+        header = body = group_type([self.add.s(1, 2)] * 42)
         original_chord = chord(header=header, body=body)
         serialized_chord = json.loads(json.dumps(original_chord))
         deserialized_chord = chord.from_dict(serialized_chord)
@@ -1350,22 +1414,23 @@ def test_from_dict_deep_deserialize_group(self, subtests):
         ):
             assert all(
                 isinstance(child_task, Signature)
-                and not isinstance(child_task, group)
+                and not isinstance(child_task, group_type)
                 for child_task in deserialized_chord.tasks
             )
         # A body which is a group remains as it we passed in
         with subtests.test(
             msg="Validate chord body is deserialized and not unpacked"
         ):
-            assert isinstance(deserialized_chord.body, group)
+            assert isinstance(deserialized_chord.body, group_type)
             assert all(
                 isinstance(body_child_task, Signature)
                 for body_child_task in deserialized_chord.body.tasks
             )

-    def test_from_dict_deeper_deserialize_group(self, subtests):
-        inner_group = group([self.add.s(1, 2)] * 42)
-        header = body = group([inner_group] * 42)
+    @pytest.mark.parametrize("group_type", [group, group_subclass])
+    def test_from_dict_deeper_deserialize_group(self, subtests, group_type):
+        inner_group = group_type([self.add.s(1, 2)] * 42)
+        header = body = group_type([inner_group] * 42)
         original_chord = chord(header=header, body=body)
         serialized_chord = json.loads(json.dumps(original_chord))
         deserialized_chord = chord.from_dict(serialized_chord)
@@ -1376,7 +1441,7 @@ def test_from_dict_deeper_deserialize_group(self, subtests):
             msg="Validate chord header tasks are deserialized and unpacked"
         ):
             assert all(
-                isinstance(child_task, group)
+                isinstance(child_task, group_type)
                 for child_task in deserialized_chord.tasks
             )
             assert all(

From 34fc87c58dba0033a2bbcf3202bf3acbeec3f1b2 Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Wed, 29 Jun 2022 16:51:22 +0600
Subject: [PATCH 0147/1051] test kombu>=5.3.0a1,<6.0 (#7598)

---
 requirements/default.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements/default.txt b/requirements/default.txt
index 0f7d1d4941d..ef8bb368ea0 100644
--- a/requirements/default.txt
+++ b/requirements/default.txt
@@ -1,6 +1,6 @@
 pytz>=2021.3
 billiard>=3.6.4.0,<5.0
-kombu>=5.2.3,<6.0
+kombu>=5.3.0a1,<6.0
 vine>=5.0.0,<6.0
 click>=8.1.2,<9.0
 click-didyoumean>=0.3.0

From 1c4ff33bd22cf94e297bd6449a06b5a30c2c1fbc Mon Sep 17 00:00:00 2001
From: dobosevych
Date: Wed, 29 Jun 2022 14:08:14 +0300
Subject: [PATCH 0148/1051] Canvas Header Stamping (#7384)

* Strip down the header-stamping PR to the basics.
* Serialize groups.
* Add groups to result backend meta data.
* Fix spelling mistake.
* Revert changes to canvas.py
* Revert changes to app/base.py
* Add stamping implementation to canvas.py
* Send task to AMQP with groups.
* Successfully pass single group to result.
* _freeze_gid dict merge fixed
* First draft of the visitor API.
* [pre-commit.ci] auto fixes from pre-commit.com hooks

  for more information, see https://pre-commit.ci
* OptionsVisitor created
* Fixed canvas.py
* [pre-commit.ci] auto fixes from pre-commit.com hooks

  for more information, see https://pre-commit.ci
* Added simple test for chord and fixed chord implementation
* Changed _IMMUTABLE_OPTIONS
* Fixed chord interface
* Fixed chord interface
* Fixed chord interface
* Fixed chord interface
* Fixed list order
* Fixed tests (stamp test and chord test), fixed order in groups
* [pre-commit.ci] auto fixes from pre-commit.com hooks

  for more information, see https://pre-commit.ci
* Fixed lint and elements
* Changed implementation of stamp API and fix lint
* Added documentation to Stamping API. Added chord with groups test
* Implemented stamping inside replace and added test for an implementation
* [pre-commit.ci] auto fixes from pre-commit.com hooks

  for more information, see https://pre-commit.ci
* Added additional tests for chord, improved coverage
* Added additional tests for chord, improved coverage
* Added additional tests for chord, improved coverage
* Split into subtests
* Group stamping rollback
* group.id is None fixed
* Added integration test
* Added integration test
* apply_async fixed
* Integration test and test_chord fixed
* Lint fixed
* chord freeze fixed
* Minor fixes.
* Chain apply_async fixed and tests fixed
* lint fixed
* Added integration test for chord
* [pre-commit.ci] auto fixes from pre-commit.com hooks

  for more information, see https://pre-commit.ci
* type -> isinstance
* [pre-commit.ci] auto fixes from pre-commit.com hooks

  for more information, see https://pre-commit.ci
* Redo header stamping (#7341)

  * _freeze_gid dict merge fixed
  * OptionsVisitor created
  * Fixed canvas.py
  * [pre-commit.ci] auto fixes from pre-commit.com hooks

    for more information, see https://pre-commit.ci
  * Added simple test for chord and fixed chord implementation
  * Changed _IMMUTABLE_OPTIONS
  * Fixed chord interface
  * Fixed chord interface
  * Fixed chord interface
  * Fixed chord interface
  * Fixed list order
  * Fixed tests (stamp test and chord test), fixed order in groups
  * [pre-commit.ci] auto fixes from pre-commit.com hooks

    for more information, see https://pre-commit.ci
  * Fixed lint and elements
  * Changed implementation of stamp API and fix lint
  * Added documentation to Stamping API. Added chord with groups test
  * Implemented stamping inside replace and added test for an implementation
  * [pre-commit.ci] auto fixes from pre-commit.com hooks

    for more information, see https://pre-commit.ci
  * Added additional tests for chord, improved coverage
  * Added additional tests for chord, improved coverage
  * Added additional tests for chord, improved coverage
  * Split into subtests
  * Group stamping rollback
  * group.id is None fixed
  * Added integration test
  * Added integration test
  * apply_async fixed
  * Integration test and test_chord fixed
  * Lint fixed
  * chord freeze fixed
  * Minor fixes.
  * Chain apply_async fixed and tests fixed
  * lint fixed
  * Added integration test for chord
  * [pre-commit.ci] auto fixes from pre-commit.com hooks

    for more information, see https://pre-commit.ci
  * type -> isinstance
  * [pre-commit.ci] auto fixes from pre-commit.com hooks

    for more information, see https://pre-commit.ci

  Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
  Co-authored-by: Omer Katz
* Added stamping mechanism
* Manual stamping improved
* flake8 fixed
* Added subtests
* Add comma.
* Moved groups to stamps
* Fixed chord and added test for that
* Strip down the header-stamping PR to the basics.
* Serialize groups.
* Add groups to result backend meta data.
* Fix spelling mistake.
* Revert changes to canvas.py
* Revert changes to app/base.py
* Add stamping implementation to canvas.py
* Send task to AMQP with groups.
* Successfully pass single group to result.
* _freeze_gid dict merge fixed
* First draft of the visitor API.
* [pre-commit.ci] auto fixes from pre-commit.com hooks

  for more information, see https://pre-commit.ci
* OptionsVisitor created
* Fixed canvas.py
* Added simple test for chord and fixed chord implementation
* [pre-commit.ci] auto fixes from pre-commit.com hooks

  for more information, see https://pre-commit.ci
* Changed _IMMUTABLE_OPTIONS
* Fixed chord interface
* Fixed chord interface
* Fixed chord interface
* Fixed chord interface
* Fixed list order
* Fixed tests (stamp test and chord test), fixed order in groups
* Fixed lint and elements
* [pre-commit.ci] auto fixes from pre-commit.com hooks

  for more information, see https://pre-commit.ci
* Changed implementation of stamp API and fix lint
* Added documentation to Stamping API. Added chord with groups test
* Implemented stamping inside replace and added test for an implementation
* [pre-commit.ci] auto fixes from pre-commit.com hooks

  for more information, see https://pre-commit.ci
* Added additional tests for chord, improved coverage
* Added additional tests for chord, improved coverage
* Added additional tests for chord, improved coverage
* Split into subtests
* Group stamping rollback
* group.id is None fixed
* Added integration test
* Added integration test
* apply_async fixed
* Integration test and test_chord fixed
* Lint fixed
* chord freeze fixed
* Minor fixes.
* Chain apply_async fixed and tests fixed
* lint fixed
* Added integration test for chord
* type -> isinstance
* Added stamping mechanism
* [pre-commit.ci] auto fixes from pre-commit.com hooks

  for more information, see https://pre-commit.ci
* Manual stamping improved
* fail_ci_if_error uncommented
* flake8 fixed
* Added subtests
* Changes
* Add comma.
* Fixed chord and added test for that
* canvas.py fixed
* Test chord.py fixed
* Fixed stamped_headers
* collections import fixed
* [pre-commit.ci] auto fixes from pre-commit.com hooks

  for more information, see https://pre-commit.ci
* collections import fixed
* Update celery/backends/base.py

  Co-authored-by: Omer Katz
* amqp.py fixed
* Refrain from using deprecated import path.
* Fix test_complex_chain regression.

  Whenever we stamp a group we need to freeze it first if it wasn't
  already frozen. Somewhere along the line, the group id changed because
  we were freezing twice. This commit places the stamping operation after
  preparing the chain's steps which fixes the problem somehow.

  We don't know why yet.
* Fixed integration tests
* Fixed integration tests
* Fixed integration tests
* Fixed integration tests
* Fixed issues with maybe_list. Add documentation
* Fixed potential issue with integration tests
* Fixed issues with _regen
* Fixed issues with _regen
* Fixed test_generator issues
* Fixed _regen stamping
* Fixed _regen stamping
* Fixed TimeOut issue
* Fixed TimeOut issue
* Fixed TimeOut issue
* Update docs/userguide/canvas.rst

  Co-authored-by: Omer Katz
* Fixed Couchbase
* Better stamping intro
* New GroupVisitor example
* Adjust documentation.

Co-authored-by: Naomi Elstein
Co-authored-by: Omer Katz
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Asif Saif Uddin
Co-authored-by: Omer Katz
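
Since the diff below introduces the visitor API, a small usage sketch may help.
This is a custom visitor of the kind the new Stamping API is meant to enable;
the header name is made up, and ``sig`` stands for any signature built
elsewhere:

    from celery.canvas import StampingVisitor

    class MonitoringIdStampingVisitor(StampingVisitor):
        """Illustrative visitor: stamps every signature with a monitoring id."""

        def on_signature(self, sig, **headers):
            return {'monitoring_id': 'id-4242', 'stamped_headers': ['monitoring_id']}

        def on_group_start(self, group, **headers):
            return {}

        def on_chain_start(self, chain, **headers):
            return {}

    # Any signature, chain, group or chord can then be stamped:
    #     sig.stamp(visitor=MonitoringIdStampingVisitor())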
countdown, eta, group_id, group_index, expires, retries, chord, @@ -774,8 +775,7 @@ def send_task(self, name, args=None, kwargs=None, countdown=None, self.conf.task_send_sent_event, root_id, parent_id, shadow, chain, ignore_result=ignore_result, - argsrepr=options.get('argsrepr'), - kwargsrepr=options.get('kwargsrepr'), + **options ) if connection: diff --git a/celery/app/task.py b/celery/app/task.py index b594f063ddf..212bc772e01 100644 --- a/celery/app/task.py +++ b/celery/app/task.py @@ -8,7 +8,7 @@ from celery import current_app, states from celery._state import _task_stack -from celery.canvas import _chain, group, signature +from celery.canvas import GroupStampingVisitor, _chain, group, signature from celery.exceptions import Ignore, ImproperlyConfigured, MaxRetriesExceededError, Reject, Retry from celery.local import class_property from celery.result import EagerResult, denied_join_result @@ -93,6 +93,8 @@ class Context: taskset = None # compat alias to group timelimit = None utc = None + stamped_headers = None + stamps = None def __init__(self, *args, **kwargs): self.update(*args, **kwargs) @@ -794,8 +796,14 @@ def apply(self, args=None, kwargs=None, 'exchange': options.get('exchange'), 'routing_key': options.get('routing_key'), 'priority': options.get('priority'), - }, + } } + if 'stamped_headers' in options: + request['stamped_headers'] = maybe_list(options['stamped_headers']) + request['stamps'] = { + header: maybe_list(options.get(header, [])) for header in request['stamped_headers'] + } + tb = None tracer = build_tracer( task.name, task, eager=True, @@ -942,6 +950,12 @@ def replace(self, sig): # retain their original task IDs as well for t in reversed(self.request.chain or []): sig |= signature(t, app=self.app) + # Stamping sig with parents groups + stamped_headers = self.request.stamped_headers + if self.request.stamps: + groups = self.request.stamps.get("groups") + sig.stamp(visitor=GroupStampingVisitor(groups=groups, stamped_headers=stamped_headers)) + # Finally, either apply or delay the new signature! if self.request.is_eager: return sig.apply().get() diff --git a/celery/backends/base.py b/celery/backends/base.py index 281c5de0504..e851c8189f6 100644 --- a/celery/backends/base.py +++ b/celery/backends/base.py @@ -230,7 +230,7 @@ def _call_task_errbacks(self, request, exc, traceback): hasattr(errback.type, '__header__') and # workaround to support tasks with bind=True executed as - # link errors. Otherwise retries can't be used + # link errors. 
Otherwise, retries can't be used
                 not isinstance(errback.type.__header__, partial) and
                 arity_greater(errback.type.__header__, 1)
             ):
@@ -488,8 +488,11 @@ def _get_result_meta(self, result,
             'retries': getattr(request, 'retries', None),
             'queue': request.delivery_info.get('routing_key')
             if hasattr(request, 'delivery_info') and
-            request.delivery_info else None
+            request.delivery_info else None,
         }
+        if getattr(request, 'stamps'):
+            request_meta['stamped_headers'] = request.stamped_headers
+            request_meta.update(request.stamps)
 
         if encode:
             # args and kwargs need to be encoded properly before saving
diff --git a/celery/canvas.py b/celery/canvas.py
index 7f7dffd6f80..4a32ae7fc5a 100644
--- a/celery/canvas.py
+++ b/celery/canvas.py
@@ -7,6 +7,7 @@
 
 import itertools
 import operator
+from abc import ABCMeta, abstractmethod
 from collections import deque
 from collections.abc import MutableSequence
 from copy import deepcopy
@@ -56,6 +57,155 @@ def task_name_from(task):
     return getattr(task, 'name', task)
 
 
+def _stamp_regen_task(task, visitor, **headers):
+    task.stamp(visitor=visitor, **headers)
+    return task
+
+
+def _merge_dictionaries(d1, d2):
+    for key, value in d1.items():
+        if key in d2:
+            if isinstance(value, dict):
+                _merge_dictionaries(d1[key], d2[key])
+            else:
+                if isinstance(value, (int, float, str)):
+                    d1[key] = [value]
+                if isinstance(d2[key], list):
+                    d1[key].extend(d2[key])
+                else:
+                    if d1[key] is None:
+                        d1[key] = []
+                    else:
+                        d1[key] = list(d1[key])
+                    d1[key].append(d2[key])
+    for key, value in d2.items():
+        if key not in d1:
+            d1[key] = value
+
+
+class StampingVisitor(metaclass=ABCMeta):
+    """Stamping API. A class that provides a stamping API for
+    canvas primitives. To implement stamping behavior for a canvas
+    primitive, override the method that represents it.
+    """
+
+    @abstractmethod
+    def on_group_start(self, group, **headers) -> dict:
+        """Method that is called on group stamping start.
+
+        Arguments:
+            group (group): Group that is stamped.
+            headers (Dict): Partial headers that could be merged with existing headers.
+        Returns:
+            Dict: headers to update.
+        """
+        pass
+
+    def on_group_end(self, group, **headers) -> None:
+        """Method that is called on group stamping end.
+
+        Arguments:
+            group (group): Group that is stamped.
+            headers (Dict): Partial headers that could be merged with existing headers.
+        """
+        pass
+
+    @abstractmethod
+    def on_chain_start(self, chain, **headers) -> dict:
+        """Method that is called on chain stamping start.
+
+        Arguments:
+            chain (chain): Chain that is stamped.
+            headers (Dict): Partial headers that could be merged with existing headers.
+        Returns:
+            Dict: headers to update.
+        """
+        pass
+
+    def on_chain_end(self, chain, **headers) -> None:
+        """Method that is called on chain stamping end.
+
+        Arguments:
+            chain (chain): Chain that is stamped.
+            headers (Dict): Partial headers that could be merged with existing headers.
+        """
+        pass
+
+    @abstractmethod
+    def on_signature(self, sig, **headers) -> dict:
+        """Method that is called on signature stamping.
+
+        Arguments:
+            sig (Signature): Signature that is stamped.
+            headers (Dict): Partial headers that could be merged with existing headers.
+        Returns:
+            Dict: headers to update.
+        """
+        pass
+
+    def on_chord_header_start(self, chord, **header) -> dict:
+        """Method that is called on chord header stamping start.
+
+        Arguments:
+            chord (chord): chord that is stamped.
+            headers (Dict): Partial headers that could be merged with existing headers.
+        Returns:
+            Dict: headers to update.
+        """
+        if not isinstance(chord.tasks, group):
+            chord.tasks = group(chord.tasks)
+        return self.on_group_start(chord.tasks, **header)
+
+    def on_chord_header_end(self, chord, **header) -> None:
+        """Method that is called on chord header stamping end.
+
+        Arguments:
+            chord (chord): chord that is stamped.
+            headers (Dict): Partial headers that could be merged with existing headers.
+        """
+        self.on_group_end(chord.tasks, **header)
+
+    def on_chord_body(self, chord, **header) -> dict:
+        """Method that is called on chord body stamping.
+
+        Arguments:
+            chord (chord): chord that is stamped.
+            headers (Dict): Partial headers that could be merged with existing headers.
+        Returns:
+            Dict: headers to update.
+        """
+        return self.on_signature(chord.body, **header)
+
+
+class GroupStampingVisitor(StampingVisitor):
+    """
+    Group stamping implementation based on Stamping API.
+    """
+
+    def __init__(self, groups=None, stamped_headers=None):
+        self.groups = groups or []
+        self.stamped_headers = stamped_headers or []
+        if "groups" not in self.stamped_headers:
+            self.stamped_headers.append("groups")
+
+    def on_group_start(self, group, **headers) -> dict:
+        if group.id is None:
+            group.set(task_id=uuid())
+
+        if group.id not in self.groups:
+            self.groups.append(group.id)
+        return {'groups': list(self.groups), "stamped_headers": list(self.stamped_headers)}
+
+    def on_group_end(self, group, **headers) -> None:
+        self.groups.pop()
+
+    def on_chain_start(self, chain, **headers) -> dict:
+        return {'groups': list(self.groups), "stamped_headers": list(self.stamped_headers)}
+
+    def on_signature(self, sig, **headers) -> dict:
+        return {'groups': list(self.groups), "stamped_headers": list(self.stamped_headers)}
+
+
 @abstract.CallableSignature.register
 class Signature(dict):
     """Task Signature.
@@ -118,7 +268,7 @@ class Signature(dict):
     _app = _type = None
     # The following fields must not be changed during freezing/merging because
     # to do so would disrupt completion of parent tasks
-    _IMMUTABLE_OPTIONS = {"group_id"}
+    _IMMUTABLE_OPTIONS = {"group_id", "stamped_headers"}
 
     @classmethod
     def register_type(cls, name=None):
@@ -178,6 +328,9 @@ def apply(self, args=None, kwargs=None, **options):
         """
         args = args if args else ()
         kwargs = kwargs if kwargs else {}
+        groups = self.options.get("groups")
+        stamped_headers = self.options.get("stamped_headers")
+        self.stamp(visitor=GroupStampingVisitor(groups=groups, stamped_headers=stamped_headers))
         # Extra options set to None are dismissed
         options = {k: v for k, v in options.items() if v is not None}
         # For callbacks: extra args are prepended to the stored args.
@@ -201,6 +354,9 @@ def apply_async(self, args=None, kwargs=None, route_name=None, **options):
         """
         args = args if args else ()
         kwargs = kwargs if kwargs else {}
+        groups = self.options.get("groups")
+        stamped_headers = self.options.get("stamped_headers")
+        self.stamp(visitor=GroupStampingVisitor(groups=groups, stamped_headers=stamped_headers))
         # Extra options set to None are dismissed
         options = {k: v for k, v in options.items() if v is not None}
         try:
@@ -225,10 +381,13 @@ def _merge(self, args=None, kwargs=None, options=None, force=False):
             # override values in `self.options` except for keys which are
             # noted as being immutable (unrelated to signature immutability)
             # implying that allowing their value to change would stall tasks
-            new_options = dict(self.options, **{
+            immutable_options = self._IMMUTABLE_OPTIONS
+            if "stamped_headers" in self.options:
+                immutable_options = self._IMMUTABLE_OPTIONS.union(set(self.options["stamped_headers"]))
+            new_options = {**self.options, **{
                 k: v for k, v in options.items()
-                if k not in self._IMMUTABLE_OPTIONS or k not in self.options
-            })
+                if k not in immutable_options or k not in self.options
+            }}
         else:
             new_options = self.options
         if self.immutable and not force:
@@ -334,6 +493,21 @@ def set(self, immutable=None, **options):
     def set_immutable(self, immutable):
         self.immutable = immutable
 
+    def stamp(self, visitor=None, **headers):
+        """Stamp this signature with the given headers.
+
+        Arguments:
+            visitor (StampingVisitor): Visitor API object.
+            headers (Dict): Stamps that should be added to headers.
+        """
+        headers = headers.copy()
+        if visitor is not None:
+            headers.update(visitor.on_signature(self, **headers))
+        else:
+            headers["stamped_headers"] = [header for header in headers.keys() if header not in self.options]
+            _merge_dictionaries(headers, self.options)
+        return self.set(**headers)
+
     def _with_list_option(self, key):
         items = self.options.setdefault(key, [])
         if not isinstance(items, MutableSequence):
@@ -633,6 +807,7 @@ def apply_async(self, args=None, kwargs=None, **options):
         args = args if args else ()
         kwargs = kwargs if kwargs else []
         app = self.app
+
         if app.conf.task_always_eager:
             with allow_join_result():
                 return self.apply(args, kwargs, **options)
@@ -659,6 +834,10 @@ def run(self, args=None, kwargs=None, group_id=None, chord=None,
             task_id, group_id, chord, group_index=group_index,
         )
 
+        groups = self.options.get("groups")
+        stamped_headers = self.options.get("stamped_headers")
+        self.stamp(visitor=GroupStampingVisitor(groups=groups, stamped_headers=stamped_headers))
+
         if results_from_prepare:
             if link:
                 tasks[0].extend_list_option('link', link)
@@ -689,6 +868,17 @@ def freeze(self, _id=None, group_id=None, chord=None,
         )
         return results[0]
 
+    def stamp(self, visitor=None, **headers):
+        if visitor is not None:
+            headers.update(visitor.on_chain_start(self, **headers))
+
+        super().stamp(visitor=visitor, **headers)
+        for task in self.tasks:
+            task.stamp(visitor=visitor, **headers)
+
+        if visitor is not None:
+            visitor.on_chain_end(self, **headers)
+
     def prepare_steps(self, args, kwargs, tasks,
                       root_id=None, parent_id=None, link_error=None, app=None,
                       last_task_id=None, group_id=None, chord_body=None,
@@ -728,7 +918,7 @@ def prepare_steps(self, args, kwargs, tasks,
                 task = from_dict(task, app=app)
             if isinstance(task, group):
                 # when groups are nested, they are unrolled - all tasks within
-                # groups within groups should be called in parallel
+                # groups should be called in parallel
                 task = maybe_unroll_group(task)
 
             # first task gets partial args from chain
@@ -816,6
+1006,9 @@ def prepare_steps(self, args, kwargs, tasks, def apply(self, args=None, kwargs=None, **options): args = args if args else () kwargs = kwargs if kwargs else {} + groups = self.options.get("groups") + stamped_headers = self.options.get("stamped_headers") + self.stamp(visitor=GroupStampingVisitor(groups=groups, stamped_headers=stamped_headers)) last, (fargs, fkwargs) = None, (args, kwargs) for task in self.tasks: res = task.clone(fargs, fkwargs).apply( @@ -1097,6 +1290,11 @@ def apply_async(self, args=None, kwargs=None, add_to_parent=True, options, group_id, root_id = self._freeze_gid(options) tasks = self._prepared(self.tasks, [], group_id, root_id, app) + + groups = self.options.get("groups") + stamped_headers = self.options.get("stamped_headers") + self.stamp(visitor=GroupStampingVisitor(groups=groups, stamped_headers=stamped_headers)) + p = barrier() results = list(self._apply_tasks(tasks, producer, app, p, args=args, kwargs=kwargs, **options)) @@ -1120,6 +1318,9 @@ def apply_async(self, args=None, kwargs=None, add_to_parent=True, def apply(self, args=None, kwargs=None, **options): args = args if args else () kwargs = kwargs if kwargs else {} + groups = self.options.get("groups") + stamped_headers = self.options.get("stamped_headers") + self.stamp(visitor=GroupStampingVisitor(groups=groups, stamped_headers=stamped_headers)) app = self.app if not self.tasks: return self.freeze() # empty group returns GroupResult @@ -1133,6 +1334,28 @@ def set_immutable(self, immutable): for task in self.tasks: task.set_immutable(immutable) + def stamp(self, visitor=None, **headers): + if visitor is not None: + headers.update(visitor.on_group_start(self, **headers)) + + super().stamp(visitor=visitor, **headers) + + if isinstance(self.tasks, _regen): + self.tasks.map(_partial(_stamp_regen_task, visitor=visitor, **headers)) + else: + new_tasks = [] + for task in self.tasks: + task = maybe_signature(task, app=self.app) + task.stamp(visitor=visitor, **headers) + new_tasks.append(task) + if isinstance(self.tasks, MutableSequence): + self.tasks[:] = new_tasks + else: + self.tasks = new_tasks + + if visitor is not None: + visitor.on_group_end(self, **headers) + def link(self, sig): # Simply link to first task. Doing this is slightly misleading because # the callback may be executed before all children in the group are @@ -1225,7 +1448,10 @@ def _apply_tasks(self, tasks, producer=None, app=None, p=None, def _freeze_gid(self, options): # remove task_id and use that as the group_id, # if we don't remove it then every task will have the same id... 
- options = dict(self.options, **options) + options = {**self.options, **{ + k: v for k, v in options.items() + if k not in self._IMMUTABLE_OPTIONS or k not in self.options + }} options['group_id'] = group_id = ( options.pop('task_id', uuid())) return options, group_id, options.get('root_id') @@ -1403,26 +1629,52 @@ def freeze(self, _id=None, group_id=None, chord=None, # first freeze all tasks in the header header_result = self.tasks.freeze( parent_id=parent_id, root_id=root_id, chord=self.body) - # secondly freeze all tasks in the body: those that should be called after the header - body_result = self.body.freeze( - _id, root_id=root_id, chord=chord, group_id=group_id, - group_index=group_index) - # we need to link the body result back to the group result, - # but the body may actually be a chain, - # so find the first result without a parent - node = body_result - seen = set() - while node: - if node.id in seen: - raise RuntimeError('Recursive result parents') - seen.add(node.id) - if node.parent is None: - node.parent = header_result - break - node = node.parent self.id = self.tasks.id + # secondly freeze all tasks in the body: those that should be called after the header + + body_result = None + if self.body: + body_result = self.body.freeze( + _id, root_id=root_id, chord=chord, group_id=group_id, + group_index=group_index) + # we need to link the body result back to the group result, + # but the body may actually be a chain, + # so find the first result without a parent + node = body_result + seen = set() + while node: + if node.id in seen: + raise RuntimeError('Recursive result parents') + seen.add(node.id) + if node.parent is None: + node.parent = header_result + break + node = node.parent + return body_result + def stamp(self, visitor=None, **headers): + if visitor is not None and self.body is not None: + headers.update(visitor.on_chord_body(self, **headers)) + self.body.stamp(visitor=visitor, **headers) + + if visitor is not None: + headers.update(visitor.on_chord_header_start(self, **headers)) + super().stamp(visitor=visitor, **headers) + + tasks = self.tasks + if isinstance(tasks, group): + tasks = tasks.tasks + + if isinstance(tasks, _regen): + tasks.map(_partial(_stamp_regen_task, visitor=visitor, **headers)) + else: + for task in tasks: + task.stamp(visitor=visitor, **headers) + + if visitor is not None: + visitor.on_chord_header_end(self, **headers) + def apply_async(self, args=None, kwargs=None, task_id=None, producer=None, publisher=None, connection=None, router=None, result_cls=None, **options): @@ -1441,7 +1693,13 @@ def apply_async(self, args=None, kwargs=None, task_id=None, return self.apply(args, kwargs, body=body, task_id=task_id, **options) + groups = self.options.get("groups") + stamped_headers = self.options.get("stamped_headers") + self.stamp(visitor=GroupStampingVisitor(groups=groups, stamped_headers=stamped_headers)) + tasks.stamp(visitor=GroupStampingVisitor(groups=groups, stamped_headers=stamped_headers)) + merged_options = dict(self.options, **options) if options else self.options + option_task_id = merged_options.pop("task_id", None) if task_id is None: task_id = option_task_id @@ -1453,9 +1711,13 @@ def apply(self, args=None, kwargs=None, propagate=True, body=None, **options): args = args if args else () kwargs = kwargs if kwargs else {} + stamped_headers = self.options.get("stamped_headers") + groups = self.options.get("groups") body = self.body if body is None else body tasks = (self.tasks.clone() if isinstance(self.tasks, group) else group(self.tasks, 
app=self.app))
+        self.stamp(visitor=GroupStampingVisitor(groups=groups, stamped_headers=stamped_headers))
+        tasks.stamp(visitor=GroupStampingVisitor(groups=groups, stamped_headers=stamped_headers))
         return body.apply(
             args=(tasks.apply(args, kwargs).get(propagate=propagate),),
         )
diff --git a/celery/utils/functional.py b/celery/utils/functional.py
index 9402a123658..dc40ceb44f9 100644
--- a/celery/utils/functional.py
+++ b/celery/utils/functional.py
@@ -200,6 +200,10 @@ def __init__(self, it):
     def __reduce__(self):
         return list, (self.data,)
 
+    def map(self, func):
+        self.__consumed = [func(el) for el in self.__consumed]
+        self.__it = map(func, self.__it)
+
     def __length_hint__(self):
         return self.__it.__length_hint__()
 
diff --git a/celery/worker/request.py b/celery/worker/request.py
index 4e4ae803ca6..d89971468c6 100644
--- a/celery/worker/request.py
+++ b/celery/worker/request.py
@@ -314,6 +314,18 @@ def reply_to(self):
     def replaced_task_nesting(self):
         return self._request_dict.get('replaced_task_nesting', 0)
 
+    @property
+    def groups(self):
+        return self._request_dict.get('groups', [])
+
+    @property
+    def stamped_headers(self) -> list:
+        return self._request_dict.get('stamped_headers', [])
+
+    @property
+    def stamps(self) -> dict:
+        return {header: self._request_dict[header] for header in self.stamped_headers}
+
     @property
     def correlation_id(self):
         # used similarly to reply_to
diff --git a/docs/userguide/canvas.rst b/docs/userguide/canvas.rst
index e6b4014ea11..2cb42254acd 100644
--- a/docs/userguide/canvas.rst
+++ b/docs/userguide/canvas.rst
@@ -1130,3 +1130,91 @@ of one:
 
 This means that the first task will have a countdown of one second, the second
 task a countdown of two seconds, and so on.
+
+Stamping
+========
+
+.. versionadded:: 5.3
+
+The goal of the Stamping API is to make it possible to label a signature
+and its components with metadata for debugging purposes.
+For example, when the canvas is a complex structure, it may be necessary to
+label some or all elements of the formed structure. The complexity
+increases even more when nested groups are rolled out or chain
+elements are replaced. In such cases, it may be necessary to
+understand which group an element is a part of or on what nesting
+level it is. This requires a mechanism that traverses the canvas
+elements and marks them with specific metadata. The Stamping API
+provides exactly that, based on the Visitor pattern.
+
+For example,
+
+.. code-block:: pycon
+
+    >>> sig1 = add.si(2, 2)
+    >>> sig1_res = sig1.freeze()
+    >>> g = group(sig1, add.si(3, 3))
+    >>> g.stamp(stamp='your_custom_stamp')
+    >>> res = g.apply_async()
+    >>> res.get(timeout=TIMEOUT)
+    [4, 6]
+    >>> sig1_res._get_task_meta()['stamp']
+    ['your_custom_stamp']
+
+will initialize the group ``g`` and mark its components with the stamp
+``your_custom_stamp``.
+
+For this feature to be useful, you need to set the :setting:`result_extended`
+configuration option to ``True`` (``result_extended = True``).
+
+
+Group stamping
+--------------
+
+When the ``apply`` and ``apply_async`` methods are called,
+the signature is automatically stamped with the ID of its enclosing group.
+Stamps are stored in the group header.
+For example, after
+
+.. code-block:: pycon
+
+    >>> g.apply_async()
+
+the header of task ``sig1`` will store the stamp ``groups`` containing ``g.id``.
+In the case of nested groups, the order of the stamps corresponds
+to the nesting level. Group stamping is idempotent;
+a task cannot be stamped twice with the same group ID.
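+
+As a minimal sketch of what gets stored (assuming the same ``add`` task and
+``TIMEOUT`` constant as in the examples above, and ``result_extended``
+enabled), the ``groups`` stamp of a frozen signature can be inspected through
+the result's task metadata once the group finishes:
+
+.. code-block:: pycon
+
+    >>> sig = add.si(2, 2)
+    >>> sig_res = sig.freeze()
+    >>> g = group(sig, add.si(4, 4))
+    >>> g_res = g.freeze()
+    >>> g.apply_async().get(timeout=TIMEOUT)
+    [4, 8]
+    >>> sig_res._get_task_meta()['groups'] == [g_res.id]
+    True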
+
+Canvas stamping
+----------------
+
+In addition to the default group stamping, a canvas can also be
+stamped with custom stamps, as the next section shows.
+
+Custom stamping
+----------------
+
+If more complex stamping logic is required, it is possible
+to implement custom stamping behavior based on the Visitor
+pattern. The class that implements this custom logic must
+inherit ``StampingVisitor`` and implement the appropriate methods.
+
+For example, the following ``InGroupVisitor`` will label
+tasks that are inside some group with the label ``in_group``.
+
+.. code-block:: python
+
+    class InGroupVisitor(StampingVisitor):
+        def __init__(self):
+            self.in_group = False
+
+        def on_group_start(self, group, **headers) -> dict:
+            self.in_group = True
+            return {"in_group": [self.in_group], "stamped_headers": ["in_group"]}
+
+        def on_group_end(self, group, **headers) -> None:
+            self.in_group = False
+
+        def on_chain_start(self, chain, **headers) -> dict:
+            return {"in_group": [self.in_group], "stamped_headers": ["in_group"]}
+
+        def on_signature(self, sig, **headers) -> dict:
+            return {"in_group": [self.in_group], "stamped_headers": ["in_group"]}
diff --git a/t/integration/conftest.py b/t/integration/conftest.py
index 8348a6fc503..5dc6b0cae11 100644
--- a/t/integration/conftest.py
+++ b/t/integration/conftest.py
@@ -38,7 +38,8 @@ def celery_config():
         'cassandra_keyspace': 'tests',
         'cassandra_table': 'tests',
         'cassandra_read_consistency': 'ONE',
-        'cassandra_write_consistency': 'ONE'
+        'cassandra_write_consistency': 'ONE',
+        'result_extended': True
     }
 
 
diff --git a/t/integration/tasks.py b/t/integration/tasks.py
index 761c4a48980..dcb9d6575f8 100644
--- a/t/integration/tasks.py
+++ b/t/integration/tasks.py
@@ -1,3 +1,4 @@
+from collections.abc import Iterable
 from time import sleep
 
 from celery import Signature, Task, chain, chord, group, shared_task
@@ -87,6 +88,12 @@ def tsum(nums):
     return sum(nums)
 
 
+@shared_task
+def xsum(nums):
+    """Sum of ints and lists."""
+    return sum(sum(num) if isinstance(num, Iterable) else num for num in nums)
+
+
 @shared_task(bind=True)
 def add_replaced(self, x, y):
     """Add two numbers (via the add task)."""
diff --git a/t/integration/test_canvas.py b/t/integration/test_canvas.py
index 6de4c3b766c..2d9c272ae3b 100644
--- a/t/integration/test_canvas.py
+++ b/t/integration/test_canvas.py
@@ -20,7 +20,7 @@
                     errback_new_style, errback_old_style, fail, fail_replaced, identity, ids,
                     print_unicode, raise_error, redis_count, redis_echo, replace_with_chain,
                     replace_with_chain_which_raises, replace_with_empty_chain, retry_once,
                     return_exception, return_priority, second_order_replace1,
-                    tsum, write_to_file_and_return_int)
+                    tsum, write_to_file_and_return_int, xsum)
 
 RETRYABLE_EXCEPTIONS = (OSError, ConnectionError, TimeoutError)
 
@@ -31,7 +31,6 @@ def is_retryable_exception(exc):
 
 TIMEOUT = 60
 
-
 _flaky = pytest.mark.flaky(reruns=5, reruns_delay=1, cause=is_retryable_exception)
 _timeout = pytest.mark.timeout(timeout=300)
 
@@ -47,7 +46,7 @@ def await_redis_echo(expected_msgs, redis_key="redis-echo", timeout=TIMEOUT):
     redis_connection = get_redis_connection()
 
     if isinstance(expected_msgs, (str, bytes, bytearray)):
-        expected_msgs = (expected_msgs, )
+        expected_msgs = (expected_msgs,)
     expected_msgs = collections.Counter(
         e if not isinstance(e, str) else e.encode("utf-8")
         for e in expected_msgs
@@ -127,7 +126,7 @@ def test_link_error_callback_retries(self):
             args=("test",),
             link_error=retry_once.s(countdown=None)
         )
-        assert result.get(timeout=TIMEOUT, propagate=False) == exception
+        assert
result.get(timeout=TIMEOUT / 10, propagate=False) == exception @flaky def test_link_error_using_signature_eager(self): @@ -148,7 +147,7 @@ def test_link_error_using_signature(self): fail.link_error(retrun_exception) exception = ExpectedException("Task expected to fail", "test") - assert (fail.delay().get(timeout=TIMEOUT, propagate=False), True) == ( + assert (fail.delay().get(timeout=TIMEOUT / 10, propagate=False), True) == ( exception, True) @@ -166,11 +165,11 @@ def test_single_chain(self, manager): @flaky def test_complex_chain(self, manager): + g = group(add.s(i) for i in range(4)) c = ( add.s(2, 2) | ( add.s(4) | add_replaced.s(8) | add.s(16) | add.s(32) - ) | - group(add.s(i) for i in range(4)) + ) | g ) res = c() assert res.get(timeout=TIMEOUT) == [64, 65, 66, 67] @@ -187,7 +186,7 @@ def test_group_results_in_chain(self, manager): ) ) res = c() - assert res.get(timeout=TIMEOUT) == [4, 5] + assert res.get(timeout=TIMEOUT / 10) == [4, 5] def test_chain_of_chain_with_a_single_task(self, manager): sig = signature('any_taskname', queue='any_q') @@ -482,7 +481,7 @@ def test_nested_chain_group_lone(self, manager): group(identity.s(42), identity.s(42)), # [42, 42] ) res = sig.delay() - assert res.get(timeout=TIMEOUT) == [42, 42] + assert res.get(timeout=TIMEOUT / 10) == [42, 42] def test_nested_chain_group_mid(self, manager): """ @@ -494,9 +493,9 @@ def test_nested_chain_group_mid(self, manager): raise pytest.skip(e.args[0]) sig = chain( - identity.s(42), # 42 - group(identity.s(), identity.s()), # [42, 42] - identity.s(), # [42, 42] + identity.s(42), # 42 + group(identity.s(), identity.s()), # [42, 42] + identity.s(), # [42, 42] ) res = sig.delay() assert res.get(timeout=TIMEOUT) == [42, 42] @@ -506,8 +505,8 @@ def test_nested_chain_group_last(self, manager): Test that a final group in a chain with preceding tasks completes. 
""" sig = chain( - identity.s(42), # 42 - group(identity.s(), identity.s()), # [42, 42] + identity.s(42), # 42 + group(identity.s(), identity.s()), # [42, 42] ) res = sig.delay() assert res.get(timeout=TIMEOUT) == [42, 42] @@ -777,6 +776,46 @@ def test_result_set_error(self, manager): class test_group: + def test_group_stamping(self, manager, subtests): + if not manager.app.conf.result_backend.startswith('redis'): + raise pytest.skip('Requires redis result backend.') + + sig1 = add.s(1, 1000) + sig1_res = sig1.freeze() + g1 = group(sig1, add.s(1, 2000)) + g1_res = g1.freeze() + res = g1.apply_async() + res.get(timeout=TIMEOUT) + + with subtests.test("sig_1 is stamped", groups=[g1_res.id]): + assert sig1_res._get_task_meta()["groups"] == [g1_res.id] + + def test_nested_group_stamping(self, manager, subtests): + if not manager.app.conf.result_backend.startswith('redis'): + raise pytest.skip('Requires redis result backend.') + + sig1 = add.s(2, 2) + sig2 = add.s(2) + + sig1_res = sig1.freeze() + sig2_res = sig2.freeze() + + g2 = group(sig2, chain(add.s(4), add.s(2))) + + g2_res = g2.freeze() + + g1 = group(sig1, chain(add.s(1, 1), g2)) + + g1_res = g1.freeze() + res = g1.apply_async() + res.get(timeout=TIMEOUT) + + with subtests.test("sig1 is stamped", groups=[g1_res.id]): + assert sig1_res._get_task_meta()['groups'] == [g1_res.id] + with subtests.test("sig2 is stamped", groups=[g1_res.id, g2_res.id]): + assert sig2_res._get_task_meta()['groups'] == \ + [g1_res.id, g2_res.id] + @flaky def test_ready_with_exception(self, manager): if not manager.app.conf.result_backend.startswith('redis'): @@ -850,7 +889,7 @@ def test_group_lone(self, manager): """ Test that a simple group completes. """ - sig = group(identity.s(42), identity.s(42)) # [42, 42] + sig = group(identity.s(42), identity.s(42)) # [42, 42] res = sig.delay() assert res.get(timeout=TIMEOUT) == [42, 42] @@ -860,7 +899,7 @@ def test_nested_group_group(self, manager): """ sig = group( group(identity.s(42), identity.s(42)), # [42, 42] - ) # [42, 42] due to unrolling + ) # [42, 42] due to unrolling res = sig.delay() assert res.get(timeout=TIMEOUT) == [42, 42] @@ -871,8 +910,8 @@ def test_nested_group_chord_counting_simple(self, manager): raise pytest.skip(e.args[0]) gchild_sig = identity.si(42) - child_chord = chord((gchild_sig, ), identity.s()) - group_sig = group((child_chord, )) + child_chord = chord((gchild_sig,), identity.s()) + group_sig = group((child_chord,)) res = group_sig.delay() # Wait for the result to land and confirm its value is as expected assert res.get(timeout=TIMEOUT) == [[42]] @@ -884,9 +923,9 @@ def test_nested_group_chord_counting_chain(self, manager): raise pytest.skip(e.args[0]) gchild_count = 42 - gchild_sig = chain((identity.si(1337), ) * gchild_count) - child_chord = chord((gchild_sig, ), identity.s()) - group_sig = group((child_chord, )) + gchild_sig = chain((identity.si(1337),) * gchild_count) + child_chord = chord((gchild_sig,), identity.s()) + group_sig = group((child_chord,)) res = group_sig.delay() # Wait for the result to land and confirm its value is as expected assert res.get(timeout=TIMEOUT) == [[1337]] @@ -898,9 +937,9 @@ def test_nested_group_chord_counting_group(self, manager): raise pytest.skip(e.args[0]) gchild_count = 42 - gchild_sig = group((identity.si(1337), ) * gchild_count) - child_chord = chord((gchild_sig, ), identity.s()) - group_sig = group((child_chord, )) + gchild_sig = group((identity.si(1337),) * gchild_count) + child_chord = chord((gchild_sig,), identity.s()) + group_sig = 
group((child_chord,)) res = group_sig.delay() # Wait for the result to land and confirm its value is as expected assert res.get(timeout=TIMEOUT) == [[1337] * gchild_count] @@ -913,10 +952,10 @@ def test_nested_group_chord_counting_chord(self, manager): gchild_count = 42 gchild_sig = chord( - (identity.si(1337), ) * gchild_count, identity.si(31337), + (identity.si(1337),) * gchild_count, identity.si(31337), ) - child_chord = chord((gchild_sig, ), identity.s()) - group_sig = group((child_chord, )) + child_chord = chord((gchild_sig,), identity.s()) + group_sig = group((child_chord,)) res = group_sig.delay() # Wait for the result to land and confirm its value is as expected assert res.get(timeout=TIMEOUT) == [[31337]] @@ -931,19 +970,19 @@ def test_nested_group_chord_counting_mixed(self, manager): child_chord = chord( ( identity.si(42), - chain((identity.si(42), ) * gchild_count), - group((identity.si(42), ) * gchild_count), - chord((identity.si(42), ) * gchild_count, identity.si(1337)), + chain((identity.si(42),) * gchild_count), + group((identity.si(42),) * gchild_count), + chord((identity.si(42),) * gchild_count, identity.si(1337)), ), identity.s(), ) - group_sig = group((child_chord, )) + group_sig = group((child_chord,)) res = group_sig.delay() # Wait for the result to land and confirm its value is as expected. The # group result gets unrolled into the encapsulating chord, hence the # weird unpacking below assert res.get(timeout=TIMEOUT) == [ - [42, 42, *((42, ) * gchild_count), 1337] + [42, 42, *((42,) * gchild_count), 1337] ] @pytest.mark.xfail(raises=TimeoutError, reason="#6734") @@ -953,8 +992,8 @@ def test_nested_group_chord_body_chain(self, manager): except NotImplementedError as e: raise pytest.skip(e.args[0]) - child_chord = chord(identity.si(42), chain((identity.s(), ))) - group_sig = group((child_chord, )) + child_chord = chord(identity.si(42), chain((identity.s(),))) + group_sig = group((child_chord,)) res = group_sig.delay() # The result can be expected to timeout since it seems like its # underlying promise might not be getting fulfilled (ref #6734). Pick a @@ -1219,6 +1258,43 @@ def assert_ping(manager): class test_chord: + def test_chord_stamping_two_levels(self, manager, subtests): + """ + For a group within a chord, test that group stamps are stored in + the correct order. 
+ """ + try: + manager.app.backend.ensure_chords_allowed() + except NotImplementedError as e: + raise pytest.skip(e.args[0]) + + sig_1 = add.s(2, 2) + sig_2 = add.s(2) + + sig_1_res = sig_1.freeze() + sig_2_res = sig_2.freeze() + + g2 = group( + sig_2, + add.s(4), + ) + + g2_res = g2.freeze() + + sig_sum = xsum.s() + sig_sum.freeze() + + g1 = chord([sig_1, chain(add.s(4, 4), g2)], sig_sum) + g1.freeze() + + res = g1.apply_async() + res.get(timeout=TIMEOUT) + + with subtests.test("sig_1_res is stamped", groups=[g1.tasks.id]): + assert sig_1_res._get_task_meta()['groups'] == [g1.tasks.id] + with subtests.test("sig_2_res is stamped", groups=[g1.id]): + assert sig_2_res._get_task_meta()['groups'] == [g1.tasks.id, g2_res.id] + @flaky def test_simple_chord_with_a_delay_in_group_save(self, manager, monkeypatch): try: @@ -1589,6 +1665,7 @@ def assert_generator(file_name): with open(file_name) as file_handle: # ensures chord header generators tasks are processed incrementally #3021 assert file_handle.readline() == '0\n', "Chord header was unrolled too early" + yield write_to_file_and_return_int.s(file_name, i) with tempfile.NamedTemporaryFile(mode='w', delete=False) as tmp_file: @@ -1752,7 +1829,7 @@ def test_nested_chord_group(self, manager): ( group(identity.s(42), identity.s(42)), # [42, 42] ), - identity.s() # [42, 42] + identity.s() # [42, 42] ) res = sig.delay() assert res.get(timeout=TIMEOUT) == [42, 42] @@ -1772,14 +1849,14 @@ def test_nested_chord_group_chain_group_tail(self, manager): sig = chord( group( chain( - identity.s(42), # 42 + identity.s(42), # 42 group( - identity.s(), # 42 - identity.s(), # 42 - ), # [42, 42] - ), # [42, 42] - ), # [[42, 42]] since the chain prevents unrolling - identity.s(), # [[42, 42]] + identity.s(), # 42 + identity.s(), # 42 + ), # [42, 42] + ), # [42, 42] + ), # [[42, 42]] since the chain prevents unrolling + identity.s(), # [[42, 42]] ) res = sig.delay() assert res.get(timeout=TIMEOUT) == [[42, 42]] @@ -1817,13 +1894,13 @@ def test_error_propagates_to_chord_from_simple(self, manager, subtests): child_sig = fail.s() - chord_sig = chord((child_sig, ), identity.s()) + chord_sig = chord((child_sig,), identity.s()) with subtests.test(msg="Error propagates from simple header task"): res = chord_sig.delay() with pytest.raises(ExpectedException): res.get(timeout=TIMEOUT) - chord_sig = chord((identity.si(42), ), child_sig) + chord_sig = chord((identity.si(42),), child_sig) with subtests.test(msg="Error propagates from simple body task"): res = chord_sig.delay() with pytest.raises(ExpectedException): @@ -1841,7 +1918,7 @@ def test_immutable_errback_called_by_chord_from_simple( errback = redis_echo.si(errback_msg, redis_key=redis_key) child_sig = fail.s() - chord_sig = chord((child_sig, ), identity.s()) + chord_sig = chord((child_sig,), identity.s()) chord_sig.link_error(errback) redis_connection.delete(redis_key) with subtests.test(msg="Error propagates from simple header task"): @@ -1853,7 +1930,7 @@ def test_immutable_errback_called_by_chord_from_simple( ): await_redis_echo({errback_msg, }, redis_key=redis_key) - chord_sig = chord((identity.si(42), ), child_sig) + chord_sig = chord((identity.si(42),), child_sig) chord_sig.link_error(errback) redis_connection.delete(redis_key) with subtests.test(msg="Error propagates from simple body task"): @@ -1879,7 +1956,7 @@ def test_mutable_errback_called_by_chord_from_simple( errback = errback_task.s() child_sig = fail.s() - chord_sig = chord((child_sig, ), identity.s()) + chord_sig = chord((child_sig,), identity.s()) 
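+        # The errback is linked to the chord as a whole; the assertions below
+        # expect it to fire exactly once when the header task fails, tracked
+        # via a redis counter keyed by the frozen chord body's task ID.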
chord_sig.link_error(errback) expected_redis_key = chord_sig.body.freeze().id redis_connection.delete(expected_redis_key) @@ -1892,7 +1969,7 @@ def test_mutable_errback_called_by_chord_from_simple( ): await_redis_count(1, redis_key=expected_redis_key) - chord_sig = chord((identity.si(42), ), child_sig) + chord_sig = chord((identity.si(42),), child_sig) chord_sig.link_error(errback) expected_redis_key = chord_sig.body.freeze().id redis_connection.delete(expected_redis_key) @@ -1914,7 +1991,7 @@ def test_error_propagates_to_chord_from_chain(self, manager, subtests): child_sig = chain(identity.si(42), fail.s(), identity.si(42)) - chord_sig = chord((child_sig, ), identity.s()) + chord_sig = chord((child_sig,), identity.s()) with subtests.test( msg="Error propagates from header chain which fails before the end" ): @@ -1922,7 +1999,7 @@ def test_error_propagates_to_chord_from_chain(self, manager, subtests): with pytest.raises(ExpectedException): res.get(timeout=TIMEOUT) - chord_sig = chord((identity.si(42), ), child_sig) + chord_sig = chord((identity.si(42),), child_sig) with subtests.test( msg="Error propagates from body chain which fails before the end" ): @@ -1942,7 +2019,7 @@ def test_immutable_errback_called_by_chord_from_chain( errback = redis_echo.si(errback_msg, redis_key=redis_key) child_sig = chain(identity.si(42), fail.s(), identity.si(42)) - chord_sig = chord((child_sig, ), identity.s()) + chord_sig = chord((child_sig,), identity.s()) chord_sig.link_error(errback) redis_connection.delete(redis_key) with subtests.test( @@ -1956,7 +2033,7 @@ def test_immutable_errback_called_by_chord_from_chain( ): await_redis_echo({errback_msg, }, redis_key=redis_key) - chord_sig = chord((identity.si(42), ), child_sig) + chord_sig = chord((identity.si(42),), child_sig) chord_sig.link_error(errback) redis_connection.delete(redis_key) with subtests.test( @@ -1986,7 +2063,7 @@ def test_mutable_errback_called_by_chord_from_chain( fail_sig_id = fail_sig.freeze().id child_sig = chain(identity.si(42), fail_sig, identity.si(42)) - chord_sig = chord((child_sig, ), identity.s()) + chord_sig = chord((child_sig,), identity.s()) chord_sig.link_error(errback) expected_redis_key = chord_sig.body.freeze().id redis_connection.delete(expected_redis_key) @@ -2001,7 +2078,7 @@ def test_mutable_errback_called_by_chord_from_chain( ): await_redis_count(1, redis_key=expected_redis_key) - chord_sig = chord((identity.si(42), ), child_sig) + chord_sig = chord((identity.si(42),), child_sig) chord_sig.link_error(errback) expected_redis_key = fail_sig_id redis_connection.delete(expected_redis_key) @@ -2025,7 +2102,7 @@ def test_error_propagates_to_chord_from_chain_tail(self, manager, subtests): child_sig = chain(identity.si(42), fail.s()) - chord_sig = chord((child_sig, ), identity.s()) + chord_sig = chord((child_sig,), identity.s()) with subtests.test( msg="Error propagates from header chain which fails at the end" ): @@ -2033,7 +2110,7 @@ def test_error_propagates_to_chord_from_chain_tail(self, manager, subtests): with pytest.raises(ExpectedException): res.get(timeout=TIMEOUT) - chord_sig = chord((identity.si(42), ), child_sig) + chord_sig = chord((identity.si(42),), child_sig) with subtests.test( msg="Error propagates from body chain which fails at the end" ): @@ -2053,7 +2130,7 @@ def test_immutable_errback_called_by_chord_from_chain_tail( errback = redis_echo.si(errback_msg, redis_key=redis_key) child_sig = chain(identity.si(42), fail.s()) - chord_sig = chord((child_sig, ), identity.s()) + chord_sig = chord((child_sig,), 
identity.s()) chord_sig.link_error(errback) redis_connection.delete(redis_key) with subtests.test( @@ -2067,7 +2144,7 @@ def test_immutable_errback_called_by_chord_from_chain_tail( ): await_redis_echo({errback_msg, }, redis_key=redis_key) - chord_sig = chord((identity.si(42), ), child_sig) + chord_sig = chord((identity.si(42),), child_sig) chord_sig.link_error(errback) redis_connection.delete(redis_key) with subtests.test( @@ -2097,7 +2174,7 @@ def test_mutable_errback_called_by_chord_from_chain_tail( fail_sig_id = fail_sig.freeze().id child_sig = chain(identity.si(42), fail_sig) - chord_sig = chord((child_sig, ), identity.s()) + chord_sig = chord((child_sig,), identity.s()) chord_sig.link_error(errback) expected_redis_key = chord_sig.body.freeze().id redis_connection.delete(expected_redis_key) @@ -2112,7 +2189,7 @@ def test_mutable_errback_called_by_chord_from_chain_tail( ): await_redis_count(1, redis_key=expected_redis_key) - chord_sig = chord((identity.si(42), ), child_sig) + chord_sig = chord((identity.si(42),), child_sig) chord_sig.link_error(errback) expected_redis_key = fail_sig_id redis_connection.delete(expected_redis_key) @@ -2136,13 +2213,13 @@ def test_error_propagates_to_chord_from_group(self, manager, subtests): child_sig = group(identity.si(42), fail.s()) - chord_sig = chord((child_sig, ), identity.s()) + chord_sig = chord((child_sig,), identity.s()) with subtests.test(msg="Error propagates from header group"): res = chord_sig.delay() with pytest.raises(ExpectedException): res.get(timeout=TIMEOUT) - chord_sig = chord((identity.si(42), ), child_sig) + chord_sig = chord((identity.si(42),), child_sig) with subtests.test(msg="Error propagates from body group"): res = chord_sig.delay() with pytest.raises(ExpectedException): @@ -2160,7 +2237,7 @@ def test_immutable_errback_called_by_chord_from_group( errback = redis_echo.si(errback_msg, redis_key=redis_key) child_sig = group(identity.si(42), fail.s()) - chord_sig = chord((child_sig, ), identity.s()) + chord_sig = chord((child_sig,), identity.s()) chord_sig.link_error(errback) redis_connection.delete(redis_key) with subtests.test(msg="Error propagates from header group"): @@ -2170,7 +2247,7 @@ def test_immutable_errback_called_by_chord_from_group( with subtests.test(msg="Errback is called after header group fails"): await_redis_echo({errback_msg, }, redis_key=redis_key) - chord_sig = chord((identity.si(42), ), child_sig) + chord_sig = chord((identity.si(42),), child_sig) chord_sig.link_error(errback) redis_connection.delete(redis_key) with subtests.test(msg="Error propagates from body group"): @@ -2196,7 +2273,7 @@ def test_mutable_errback_called_by_chord_from_group( fail_sig_id = fail_sig.freeze().id child_sig = group(identity.si(42), fail_sig) - chord_sig = chord((child_sig, ), identity.s()) + chord_sig = chord((child_sig,), identity.s()) chord_sig.link_error(errback) expected_redis_key = chord_sig.body.freeze().id redis_connection.delete(expected_redis_key) @@ -2207,7 +2284,7 @@ def test_mutable_errback_called_by_chord_from_group( with subtests.test(msg="Errback is called after header group fails"): await_redis_count(1, redis_key=expected_redis_key) - chord_sig = chord((identity.si(42), ), child_sig) + chord_sig = chord((identity.si(42),), child_sig) chord_sig.link_error(errback) expected_redis_key = fail_sig_id redis_connection.delete(expected_redis_key) @@ -2235,7 +2312,7 @@ def test_immutable_errback_called_by_chord_from_group_fail_multiple( *(fail.s() for _ in range(fail_task_count)), ) - chord_sig = chord((child_sig, ), 
identity.s()) + chord_sig = chord((child_sig,), identity.s()) chord_sig.link_error(errback) redis_connection.delete(redis_key) with subtests.test(msg="Error propagates from header group"): @@ -2248,7 +2325,7 @@ def test_immutable_errback_called_by_chord_from_group_fail_multiple( # is attached to the chord body which is a single task! await_redis_count(1, redis_key=redis_key) - chord_sig = chord((identity.si(42), ), child_sig) + chord_sig = chord((identity.si(42),), child_sig) chord_sig.link_error(errback) redis_connection.delete(redis_key) with subtests.test(msg="Error propagates from body group"): @@ -2285,12 +2362,13 @@ def test_mutable_errback_called_by_chord_from_group_fail_multiple( *fail_sigs, ) - chord_sig = chord((child_sig, ), identity.s()) + chord_sig = chord((child_sig,), identity.s()) chord_sig.link_error(errback) expected_redis_key = chord_sig.body.freeze().id redis_connection.delete(expected_redis_key) with subtests.test(msg="Error propagates from header group"): res = chord_sig.delay() + sleep(1) with pytest.raises(ExpectedException): res.get(timeout=TIMEOUT) with subtests.test(msg="Errback is called after header group fails"): @@ -2298,12 +2376,13 @@ def test_mutable_errback_called_by_chord_from_group_fail_multiple( # is attached to the chord body which is a single task! await_redis_count(1, redis_key=expected_redis_key) - chord_sig = chord((identity.si(42), ), child_sig) + chord_sig = chord((identity.si(42),), child_sig) chord_sig.link_error(errback) for fail_sig_id in fail_sig_ids: redis_connection.delete(fail_sig_id) with subtests.test(msg="Error propagates from body group"): res = chord_sig.delay() + sleep(1) with pytest.raises(ExpectedException): res.get(timeout=TIMEOUT) with subtests.test(msg="Errback is called after body group fails"): @@ -2341,7 +2420,7 @@ def test_chord_header_child_replaced_with_chain_first(self, manager): raise pytest.skip(e.args[0]) orig_sig = chord( - (replace_with_chain.si(42), identity.s(1337), ), + (replace_with_chain.si(42), identity.s(1337),), identity.s(), ) res_obj = orig_sig.delay() @@ -2354,7 +2433,7 @@ def test_chord_header_child_replaced_with_chain_middle(self, manager): raise pytest.skip(e.args[0]) orig_sig = chord( - (identity.s(42), replace_with_chain.s(1337), identity.s(31337), ), + (identity.s(42), replace_with_chain.s(1337), identity.s(31337),), identity.s(), ) res_obj = orig_sig.delay() @@ -2367,7 +2446,7 @@ def test_chord_header_child_replaced_with_chain_last(self, manager): raise pytest.skip(e.args[0]) orig_sig = chord( - (identity.s(42), replace_with_chain.s(1337), ), + (identity.s(42), replace_with_chain.s(1337),), identity.s(), ) res_obj = orig_sig.delay() diff --git a/t/unit/conftest.py b/t/unit/conftest.py index 9b0b46921d0..26b0e42d9ff 100644 --- a/t/unit/conftest.py +++ b/t/unit/conftest.py @@ -354,7 +354,7 @@ def sleepdeprived(request): >>> pass """ module = request.node.get_closest_marker( - "sleepdeprived_patched_module").args[0] + "sleepdeprived_patched_module").args[0] old_sleep, module.sleep = module.sleep, noop try: yield diff --git a/t/unit/tasks/test_canvas.py b/t/unit/tasks/test_canvas.py index f673159954b..f7b5f7cac9f 100644 --- a/t/unit/tasks/test_canvas.py +++ b/t/unit/tasks/test_canvas.py @@ -1,4 +1,5 @@ import json +import math from unittest.mock import ANY, MagicMock, Mock, call, patch, sentinel import pytest @@ -16,6 +17,10 @@ 'options': {'task_id': 'TASK_ID'}, 'subtask_type': ''}, ) +try: + from collections import Iterable +except ImportError: + from collections.abc import Iterable class 
test_maybe_unroll_group: @@ -36,18 +41,57 @@ def setup(self): @self.app.task(shared=False) def add(x, y): return x + y + self.add = add @self.app.task(shared=False) def mul(x, y): return x * y + self.mul = mul @self.app.task(shared=False) def div(x, y): return x / y + self.div = div + @self.app.task(shared=False) + def xsum(numbers): + return sum(sum(num) if isinstance(num, Iterable) else num for num in numbers) + + self.xsum = xsum + + @self.app.task(shared=False, bind=True) + def replaced(self, x, y): + return self.replace(add.si(x, y)) + + self.replaced = replaced + + @self.app.task(shared=False, bind=True) + def replaced_group(self, x, y): + return self.replace(group(add.si(x, y), mul.si(x, y))) + + self.replaced_group = replaced_group + + @self.app.task(shared=False, bind=True) + def replace_with_group(self, x, y): + return self.replace(group(add.si(x, y), mul.si(x, y))) + + self.replace_with_group = replace_with_group + + @self.app.task(shared=False, bind=True) + def replace_with_chain(self, x, y): + return self.replace(group(add.si(x, y) | mul.s(y), add.si(x, y))) + + self.replace_with_chain = replace_with_chain + + @self.app.task(shared=False) + def xprod(numbers): + return math.prod(numbers) + + self.xprod = xprod + @Signature.register_type() class chord_subclass(chord): @@ -78,6 +122,65 @@ def __init__(self, *args, **kwargs): class test_Signature(CanvasCase): + def test_double_stamping(self, subtests): + """ + Test manual signature stamping with two different stamps. + """ + self.app.conf.task_always_eager = True + self.app.conf.task_store_eager_result = True + self.app.conf.result_extended = True + + sig_1 = self.add.s(2, 2) + sig_1.stamp(stamp1="stamp1") + sig_1.stamp(stamp2="stamp2") + sig_1_res = sig_1.freeze() + sig_1.apply() + + with subtests.test("sig_1_res is stamped with stamp1", stamp1=["stamp1"]): + assert sig_1_res._get_task_meta()["stamp1"] == ["stamp1"] + + with subtests.test("sig_1_res is stamped with stamp2", stamp2=["stamp2"]): + assert sig_1_res._get_task_meta()["stamp2"] == ["stamp2"] + + with subtests.test("sig_1_res is stamped twice", stamped_headers=["stamp2", "stamp1"]): + assert sig_1_res._get_task_meta()["stamped_headers"] == ["stamp2", "stamp1", "groups"] + + def test_twice_stamping(self, subtests): + """ + Test manual signature stamping with two stamps twice. + """ + self.app.conf.task_always_eager = True + self.app.conf.task_store_eager_result = True + self.app.conf.result_extended = True + + sig_1 = self.add.s(2, 2) + sig_1.stamp(stamp="stamp1") + sig_1.stamp(stamp="stamp2") + sig_1_res = sig_1.freeze() + sig_1.apply() + + with subtests.test("sig_1_res is stamped twice", stamps=["stamp2", "stamp1"]): + assert sig_1_res._get_task_meta()["stamp"] == ["stamp2", "stamp1"] + + with subtests.test("sig_1_res is stamped twice", stamped_headers=["stamp2", "stamp1"]): + assert sig_1_res._get_task_meta()["stamped_headers"] == ["stamp", "groups"] + + @pytest.mark.usefixtures('depends_on_current_app') + def test_manual_stamping(self): + """ + Test manual signature stamping. 
+ """ + self.app.conf.task_always_eager = True + self.app.conf.task_store_eager_result = True + self.app.conf.result_extended = True + + sig_1 = self.add.s(2, 2) + stamps = ["stamp1", "stamp2"] + sig_1.stamp(visitor=None, groups=[stamps[1]]) + sig_1.stamp(visitor=None, groups=stamps[0]) + sig_1_res = sig_1.freeze() + sig_1.apply() + assert sig_1_res._get_task_meta()['groups'] == stamps def test_getitem_property_class(self): assert Signature.task @@ -661,6 +764,326 @@ def test_chain_single_child_group_result(self): class test_group(CanvasCase): + def test_group_stamping_one_level(self, subtests): + """ + Test that when a group ID is frozen, that group ID is stored in + each task within the group. + """ + self.app.conf.task_always_eager = True + self.app.conf.task_store_eager_result = True + self.app.conf.result_extended = True + + sig_1 = self.add.s(2, 2) + sig_2 = self.add.s(4, 4) + sig_1_res = sig_1.freeze() + sig_2_res = sig_2.freeze() + + g = group(sig_1, sig_2, app=self.app) + g.stamp(stamp="stamp") + g_res = g.freeze() + g.apply() + + with subtests.test("sig_1_res is stamped", groups=[g_res.id]): + assert sig_1_res._get_task_meta()['groups'] == [g_res.id] + + with subtests.test("sig_1_res is stamped manually", stamp=["stamp"]): + assert sig_1_res._get_task_meta()['stamp'] == ["stamp"] + + with subtests.test("sig_2_res is stamped", groups=[g_res.id]): + assert sig_2_res._get_task_meta()['groups'] == [g_res.id] + + with subtests.test("sig_2_res is stamped manually", stamp=["stamp"]): + assert sig_2_res._get_task_meta()['stamp'] == ["stamp"] + + with subtests.test("sig_1_res has stamped_headers", stamped_headers=["stamp", 'groups']): + assert sig_1_res._get_task_meta()['stamped_headers'] == ['stamp', 'groups'] + + with subtests.test("sig_2_res has stamped_headers", stamped_headers=["stamp"]): + assert sig_2_res._get_task_meta()['stamped_headers'] == ['stamp', 'groups'] + + def test_group_stamping_two_levels(self, subtests): + """ + For a group within a group, test that group stamps are stored in + the correct order. + """ + self.app.conf.task_always_eager = True + self.app.conf.task_store_eager_result = True + self.app.conf.result_extended = True + + sig_1 = self.add.s(2, 2) + sig_2 = self.add.s(1, 1) + nested_sig_1 = self.add.s(2) + nested_sig_2 = self.add.s(4) + + sig_1_res = sig_1.freeze() + sig_2_res = sig_2.freeze() + first_nested_sig_res = nested_sig_1.freeze() + second_nested_sig_res = nested_sig_2.freeze() + + g2 = group( + nested_sig_1, + nested_sig_2, + app=self.app + ) + + g2_res = g2.freeze() + + g1 = group( + sig_1, + chain( + sig_2, + g2, + app=self.app + ), + app=self.app + ) + + g1_res = g1.freeze() + g1.apply() + + with subtests.test("sig_1_res is stamped", groups=[g1_res.id]): + assert sig_1_res._get_task_meta()['groups'] == [g1_res.id] + with subtests.test("sig_2_res is stamped", groups=[g1_res.id]): + assert sig_2_res._get_task_meta()['groups'] == [g1_res.id] + with subtests.test("first_nested_sig_res is stamped", groups=[g1_res.id, g2_res.id]): + assert first_nested_sig_res._get_task_meta()['groups'] == \ + [g1_res.id, g2_res.id] + with subtests.test("second_nested_sig_res is stamped", groups=[g1_res.id, g2_res.id]): + assert second_nested_sig_res._get_task_meta()['groups'] == \ + [g1_res.id, g2_res.id] + + def test_group_stamping_with_replace(self, subtests): + """ + For a group within a replaced element, test that group stamps are replaced correctly. 
+ """ + self.app.conf.task_always_eager = True + self.app.conf.task_store_eager_result = True + self.app.conf.result_extended = True + + sig_1 = self.add.s(2, 2) + sig_2 = self.add.s(2, 2) | self.replaced.s(8) + sig_1_res = sig_1.freeze() + sig_2_res = sig_2.freeze() + + g = group(sig_1, sig_2, app=self.app) + g_res = g.freeze() + g.apply() + + with subtests.test("sig_1_res is stamped", groups=[g_res.id]): + assert sig_1_res._get_task_meta()['groups'] == [g_res.id] + with subtests.test("sig_2_res is stamped", groups=[g_res.id]): + assert sig_2_res._get_task_meta()['groups'] == [g_res.id] + + def test_group_stamping_with_replaced_group(self, subtests): + """ + For a group within a replaced element, test that group stamps are replaced correctly. + """ + self.app.conf.task_always_eager = True + self.app.conf.task_store_eager_result = True + self.app.conf.result_extended = True + nested_g = self.replace_with_group.s(8) + nested_g_res = nested_g.freeze() + sig_1 = self.add.s(2, 2) + sig_2 = self.add.s(2, 2) | nested_g + sig_1_res = sig_1.freeze() + sig_2_res = sig_2.freeze() + + g = group(sig_1, sig_2, app=self.app) + g_res = g.freeze() + g.apply() + + with subtests.test("sig_1_res is stamped", groups=[g_res.id]): + assert sig_1_res._get_task_meta()['groups'] == [g_res.id] + with subtests.test("sig_2_res is stamped", groups=nested_g_res._get_task_meta()['groups']): + assert sig_2_res._get_task_meta()['groups'] == nested_g_res._get_task_meta()['groups'] + + def test_group_stamping_with_replaced_chain(self, subtests): + """ + For a group within a replaced element, test that group stamps are replaced correctly. + """ + self.app.conf.task_always_eager = True + self.app.conf.task_store_eager_result = True + self.app.conf.result_extended = True + nested_g = self.replace_with_chain.s(8) + nested_g_res = nested_g.freeze() + sig_1 = self.add.s(2, 2) + sig_2 = self.add.s(2, 2) | nested_g + sig_1_res = sig_1.freeze() + sig_2_res = sig_2.freeze() + + g = group(sig_1, sig_2, app=self.app) + g_res = g.freeze() + g.apply() + + with subtests.test("sig_1_res is stamped", groups=[g_res.id]): + assert sig_1_res._get_task_meta()['groups'] == [g_res.id] + with subtests.test("sig_2_res is stamped", groups=nested_g_res._get_task_meta()['groups']): + assert sig_2_res._get_task_meta()['groups'] == nested_g_res._get_task_meta()['groups'] + + def test_group_stamping_three_levels(self, subtests): + """ + For groups with three levels of nesting, test that group stamps + are saved in the correct order for all nesting levels. 
+ """ + self.app.conf.task_always_eager = True + self.app.conf.task_store_eager_result = True + self.app.conf.result_extended = True + + sig_in_g1_1 = self.add.s(2, 2) + sig_in_g1_2 = self.add.s(1, 1) + sig_in_g2 = self.add.s(2) + sig_in_g2_chain = self.add.s(4) + sig_in_g3_1 = self.add.s(8) + sig_in_g3_2 = self.add.s(16) + + sig_in_g1_1_res = sig_in_g1_1.freeze() + sig_in_g1_2_res = sig_in_g1_2.freeze() + sig_in_g2_res = sig_in_g2.freeze() + sig_in_g2_chain_res = sig_in_g2_chain.freeze() + sig_in_g3_1_res = sig_in_g3_1.freeze() + sig_in_g3_2_res = sig_in_g3_2.freeze() + + g3 = group( + sig_in_g3_1, + sig_in_g3_2, + app=self.app + ) + + g3_res = g3.freeze() + + g2 = group( + sig_in_g2, + chain( + sig_in_g2_chain, + g3 + ), + app=self.app + ) + + g2_res = g2.freeze() + + g1 = group( + sig_in_g1_1, + chain( + sig_in_g1_2, + g2, + app=self.app + ), + app=self.app + ) + + g1_res = g1.freeze() + g1.apply() + + with subtests.test("sig_in_g1_1_res is stamped", groups=[g1_res.id]): + assert sig_in_g1_1_res._get_task_meta()['groups'] == [g1_res.id] + with subtests.test("sig_in_g1_2_res is stamped", groups=[g1_res.id]): + assert sig_in_g1_2_res._get_task_meta()['groups'] == [g1_res.id] + with subtests.test("sig_in_g2_res is stamped", groups=[g1_res.id, g2_res.id]): + assert sig_in_g2_res._get_task_meta()['groups'] == \ + [g1_res.id, g2_res.id] + with subtests.test("sig_in_g2_chain_res is stamped", groups=[g1_res.id, g2_res.id]): + assert sig_in_g2_chain_res._get_task_meta()['groups'] == \ + [g1_res.id, g2_res.id] + with subtests.test("sig_in_g3_1_res is stamped", groups=[g1_res.id, g2_res.id, g3_res.id]): + assert sig_in_g3_1_res._get_task_meta()['groups'] == \ + [g1_res.id, g2_res.id, g3_res.id] + with subtests.test("sig_in_g3_2_res is stamped", groups=[g1_res.id, g2_res.id, g3_res.id]): + assert sig_in_g3_2_res._get_task_meta()['groups'] == \ + [g1_res.id, g2_res.id, g3_res.id] + + def test_group_stamping_parallel_groups(self, subtests): + """ + In the case of group within a group that is from another canvas + element, ensure that group stamps are added correctly when groups are + run in parallel. 
+ """ + self.app.conf.task_always_eager = True + self.app.conf.task_store_eager_result = True + self.app.conf.result_extended = True + + sig_in_g1 = self.add.s(1, 1) + sig_in_g2_chain = self.add.s(2, 2) + sig_in_g2_1 = self.add.s(4) + sig_in_g2_2 = self.add.s(8) + sig_in_g3_chain = self.add.s(2, 2) + sig_in_g3_1 = self.add.s(4) + sig_in_g3_2 = self.add.s(8) + + sig_in_g1_res = sig_in_g1.freeze(_id='sig_in_g1') + sig_in_g2_chain_res = sig_in_g2_chain.freeze(_id='sig_in_g2_chain') + sig_in_g2_1_res = sig_in_g2_1.freeze(_id='sig_in_g2_1') + sig_in_g2_2_res = sig_in_g2_2.freeze(_id='sig_in_g2_2') + sig_in_g3_chain_res = sig_in_g3_chain.freeze(_id='sig_in_g3_chain') + sig_in_g3_1_res = sig_in_g3_1.freeze(_id='sig_in_g3_1') + sig_in_g3_2_res = sig_in_g3_2.freeze(_id='sig_in_g3_2') + + g3 = group( + sig_in_g3_1, + sig_in_g3_2, + app=self.app + ) + g3_res = g3.freeze(group_id='g3') + + g2 = group( + sig_in_g2_1, + sig_in_g2_2, + app=self.app + ) + g2_res = g2.freeze(group_id='g2') + + g1 = group( + sig_in_g1, + chain( + sig_in_g2_chain, + g2, + app=self.app + ), + chain( + sig_in_g3_chain, + g3, + app=self.app + ), + ) + g1_res = g1.freeze(group_id='g1') + g1.apply() + + with subtests.test("sig_in_g1 is stamped", groups=[g1_res.id]): + assert sig_in_g1_res.id == 'sig_in_g1' + assert sig_in_g1_res._get_task_meta()['groups'] == [g1_res.id] + + with subtests.test("sig_in_g2_chain is stamped", groups=[g1_res.id]): + assert sig_in_g2_chain_res.id == 'sig_in_g2_chain' + assert sig_in_g2_chain_res._get_task_meta()['groups'] == \ + [g1_res.id] + + with subtests.test("sig_in_g2_1 is stamped", groups=[g1_res.id, g2_res.id]): + assert sig_in_g2_1_res.id == 'sig_in_g2_1' + assert sig_in_g2_1_res._get_task_meta()['groups'] == \ + [g1_res.id, g2_res.id] + + with subtests.test("sig_in_g2_2 is stamped", + groups=[g1_res.id, g2_res.id]): + assert sig_in_g2_2_res.id == 'sig_in_g2_2' + assert sig_in_g2_2_res._get_task_meta()['groups'] == \ + [g1_res.id, g2_res.id] + + with subtests.test("sig_in_g3_chain is stamped", + groups=[g1_res.id]): + assert sig_in_g3_chain_res.id == 'sig_in_g3_chain' + assert sig_in_g3_chain_res._get_task_meta()['groups'] == \ + [g1_res.id] + + with subtests.test("sig_in_g3_1 is stamped", + groups=[g1_res.id, g3_res.id]): + assert sig_in_g3_1_res.id == 'sig_in_g3_1' + assert sig_in_g3_1_res._get_task_meta()['groups'] == \ + [g1_res.id, g3_res.id] + + with subtests.test("sig_in_g3_2 is stamped", + groups=[g1_res.id, g3_res.id]): + assert sig_in_g3_2_res._get_task_meta()['groups'] == \ + [g1_res.id, g3_res.id] def test_repr(self): x = group([self.add.s(2, 2), self.add.s(4, 4)]) @@ -886,9 +1309,9 @@ def test_apply_from_generator_empty(self): def test_apply_contains_chord(self): gchild_count = 42 gchild_sig = self.add.si(0, 0) - gchild_sigs = (gchild_sig, ) * gchild_count + gchild_sigs = (gchild_sig,) * gchild_count child_chord = chord(gchild_sigs, gchild_sig) - group_sig = group((child_chord, )) + group_sig = group((child_chord,)) with patch.object( self.app.backend, "set_chord_size", ) as mock_set_chord_size, patch( @@ -906,10 +1329,10 @@ def test_apply_contains_chord(self): def test_apply_contains_chords_containing_chain(self): ggchild_count = 42 ggchild_sig = self.add.si(0, 0) - gchild_sig = chain((ggchild_sig, ) * ggchild_count) + gchild_sig = chain((ggchild_sig,) * ggchild_count) child_count = 24 - child_chord = chord((gchild_sig, ), ggchild_sig) - group_sig = group((child_chord, ) * child_count) + child_chord = chord((gchild_sig,), ggchild_sig) + group_sig = group((child_chord,) * 
child_count) with patch.object( self.app.backend, "set_chord_size", ) as mock_set_chord_size, patch( @@ -922,14 +1345,14 @@ def test_apply_contains_chords_containing_chain(self): assert len(res_obj.children) == child_count # We must have set the chord sizes based on the number of tail tasks of # the encapsulated chains - in this case 1 for each child chord - mock_set_chord_size.assert_has_calls((call(ANY, 1), ) * child_count) + mock_set_chord_size.assert_has_calls((call(ANY, 1),) * child_count) @pytest.mark.xfail(reason="Invalid canvas setup with bad exception") def test_apply_contains_chords_containing_empty_chain(self): gchild_sig = chain(tuple()) child_count = 24 - child_chord = chord((gchild_sig, ), self.add.si(0, 0)) - group_sig = group((child_chord, ) * child_count) + child_chord = chord((gchild_sig,), self.add.si(0, 0)) + group_sig = group((child_chord,) * child_count) # This is an invalid setup because we can't complete a chord header if # there are no actual tasks which will run in it. However, the current # behaviour of an `IndexError` isn't particularly helpful to a user. @@ -940,11 +1363,11 @@ def test_apply_contains_chords_containing_chain_with_empty_tail(self): ggchild_sig = self.add.si(0, 0) tail_count = 24 gchild_sig = chain( - (ggchild_sig, ) * ggchild_count + - (group((ggchild_sig, ) * tail_count), group(tuple()), ), + (ggchild_sig,) * ggchild_count + + (group((ggchild_sig,) * tail_count), group(tuple()),), ) - child_chord = chord((gchild_sig, ), ggchild_sig) - group_sig = group((child_chord, )) + child_chord = chord((gchild_sig,), ggchild_sig) + group_sig = group((child_chord,)) with patch.object( self.app.backend, "set_chord_size", ) as mock_set_chord_size, patch( @@ -963,10 +1386,10 @@ def test_apply_contains_chords_containing_chain_with_empty_tail(self): def test_apply_contains_chords_containing_group(self): ggchild_count = 42 ggchild_sig = self.add.si(0, 0) - gchild_sig = group((ggchild_sig, ) * ggchild_count) + gchild_sig = group((ggchild_sig,) * ggchild_count) child_count = 24 - child_chord = chord((gchild_sig, ), ggchild_sig) - group_sig = group((child_chord, ) * child_count) + child_chord = chord((gchild_sig,), ggchild_sig) + group_sig = group((child_chord,) * child_count) with patch.object( self.app.backend, "set_chord_size", ) as mock_set_chord_size, patch( @@ -980,15 +1403,15 @@ def test_apply_contains_chords_containing_group(self): # We must have set the chord sizes based on the number of tail tasks of # the encapsulated groups - in this case `ggchild_count` mock_set_chord_size.assert_has_calls( - (call(ANY, ggchild_count), ) * child_count, + (call(ANY, ggchild_count),) * child_count, ) @pytest.mark.xfail(reason="Invalid canvas setup but poor behaviour") def test_apply_contains_chords_containing_empty_group(self): gchild_sig = group(tuple()) child_count = 24 - child_chord = chord((gchild_sig, ), self.add.si(0, 0)) - group_sig = group((child_chord, ) * child_count) + child_chord = chord((gchild_sig,), self.add.si(0, 0)) + group_sig = group((child_chord,) * child_count) with patch.object( self.app.backend, "set_chord_size", ) as mock_set_chord_size, patch( @@ -1003,15 +1426,15 @@ def test_apply_contains_chords_containing_empty_group(self): # chain test, this is an invalid setup. However, we should probably # expect that the chords are dealt with in some other way the probably # being left incomplete forever... 
- mock_set_chord_size.assert_has_calls((call(ANY, 0), ) * child_count) + mock_set_chord_size.assert_has_calls((call(ANY, 0),) * child_count) def test_apply_contains_chords_containing_chord(self): ggchild_count = 42 ggchild_sig = self.add.si(0, 0) - gchild_sig = chord((ggchild_sig, ) * ggchild_count, ggchild_sig) + gchild_sig = chord((ggchild_sig,) * ggchild_count, ggchild_sig) child_count = 24 - child_chord = chord((gchild_sig, ), ggchild_sig) - group_sig = group((child_chord, ) * child_count) + child_chord = chord((gchild_sig,), ggchild_sig) + group_sig = group((child_chord,) * child_count) with patch.object( self.app.backend, "set_chord_size", ) as mock_set_chord_size, patch( @@ -1027,14 +1450,14 @@ def test_apply_contains_chords_containing_chord(self): # child chord. This means we have `child_count` interleaved calls to # set chord sizes of 1 and `ggchild_count`. mock_set_chord_size.assert_has_calls( - (call(ANY, 1), call(ANY, ggchild_count), ) * child_count, + (call(ANY, 1), call(ANY, ggchild_count),) * child_count, ) def test_apply_contains_chords_containing_empty_chord(self): gchild_sig = chord(tuple(), self.add.si(0, 0)) child_count = 24 - child_chord = chord((gchild_sig, ), self.add.si(0, 0)) - group_sig = group((child_chord, ) * child_count) + child_chord = chord((gchild_sig,), self.add.si(0, 0)) + group_sig = group((child_chord,) * child_count) with patch.object( self.app.backend, "set_chord_size", ) as mock_set_chord_size, patch( @@ -1047,10 +1470,126 @@ def test_apply_contains_chords_containing_empty_chord(self): assert len(res_obj.children) == child_count # We must have set the chord sizes based on the number of tail tasks of # the encapsulated chains - in this case 1 for each child chord - mock_set_chord_size.assert_has_calls((call(ANY, 1), ) * child_count) + mock_set_chord_size.assert_has_calls((call(ANY, 1),) * child_count) class test_chord(CanvasCase): + def test_chord_stamping_one_level(self, subtests): + """ + In the case of group within a chord that is from another canvas + element, ensure that chord stamps are added correctly when chord are + run in parallel. 
+ """ + self.app.conf.task_always_eager = True + self.app.conf.task_store_eager_result = True + self.app.conf.result_extended = True + + sig_1 = self.add.s(2, 2) + sig_2 = self.add.s(4, 4) + sig_1_res = sig_1.freeze() + sig_2_res = sig_2.freeze() + sig_sum = self.xsum.s() + sig_sum_res = sig_sum.freeze() + + g = chord([sig_1, sig_2], sig_sum, app=self.app) + g.stamp(stamp="stamp") + g.freeze() + g.apply() + + with subtests.test("sig_sum_res body isn't stamped", groups=[]): + assert sig_sum_res._get_task_meta()['groups'] == [] + + with subtests.test("sig_1_res is stamped", groups=[g.id]): + assert sig_1_res._get_task_meta()['groups'] == [g.id] + + with subtests.test("sig_2_res is stamped", groups=[g.id]): + assert sig_2_res._get_task_meta()['groups'] == [g.id] + + with subtests.test("sig_1_res is stamped manually", stamp=["stamp"]): + assert sig_1_res._get_task_meta()['stamp'] == ["stamp"] + + with subtests.test("sig_2_res is stamped manually", stamp=["stamp"]): + assert sig_2_res._get_task_meta()['stamp'] == ["stamp"] + + with subtests.test("sig_1_res has stamped_headers", stamped_headers=["stamp", 'groups']): + assert sig_1_res._get_task_meta()['stamped_headers'] == ['stamp', 'groups'] + + with subtests.test("sig_2_res has stamped_headers", stamped_headers=["stamp", 'groups']): + assert sig_2_res._get_task_meta()['stamped_headers'] == ['stamp', 'groups'] + + def test_chord_stamping_two_levels(self, subtests): + """ + For a group within a chord, test that group stamps are stored in + the correct order. + """ + self.app.conf.task_always_eager = True + self.app.conf.task_store_eager_result = True + self.app.conf.result_extended = True + + sig_1 = self.add.s(2, 2) + sig_2 = self.add.s(1, 1) + nested_sig_1 = self.add.s(2) + nested_sig_2 = self.add.s(4) + + sig_1_res = sig_1.freeze() + sig_2_res = sig_2.freeze() + first_nested_sig_res = nested_sig_1.freeze() + second_nested_sig_res = nested_sig_2.freeze() + + g2 = group( + nested_sig_1, + nested_sig_2, + app=self.app + ) + + g2_res = g2.freeze() + + sig_sum = self.xsum.s() + sig_sum.freeze() + + g1 = chord([sig_2, chain(sig_1, g2)], sig_sum, app=self.app) + + g1.freeze() + g1.apply() + + with subtests.test("sig_1_res body is stamped", groups=[g1.id]): + assert sig_1_res._get_task_meta()['groups'] == [g1.id] + with subtests.test("sig_2_res body is stamped", groups=[g1.id]): + assert sig_2_res._get_task_meta()['groups'] == [g1.id] + with subtests.test("first_nested_sig_res body is stamped", groups=[g1.id, g2_res.id]): + assert first_nested_sig_res._get_task_meta()['groups'] == \ + [g1.id, g2_res.id] + with subtests.test("second_nested_sig_res body is stamped", groups=[g1.id, g2_res.id]): + assert second_nested_sig_res._get_task_meta()['groups'] == \ + [g1.id, g2_res.id] + + def test_chord_stamping_body_group(self, subtests): + """ + In the case of group within a chord that is from another canvas + element, ensure that chord stamps are added correctly when chord are + run in parallel. 
+ """ + self.app.conf.task_always_eager = True + self.app.conf.task_store_eager_result = True + self.app.conf.result_extended = True + + tasks = [self.add.s(i, i) for i in range(10)] + + sum_task = self.xsum.s() + sum_task_res = sum_task.freeze() + prod_task = self.xprod.s() + prod_task_res = sum_task.freeze() + + body = group(sum_task, prod_task) + + g = chord(tasks, body, app=self.app) + g.freeze() + g.apply() + + with subtests.test("sum_task_res is stamped", groups=[body.id]): + assert sum_task_res._get_task_meta()['groups'] == [body.id] + with subtests.test("prod_task_res is stamped", groups=[body.id]): + assert prod_task_res._get_task_meta()['groups'] == [body.id] def test__get_app_does_not_exhaust_generator(self): def build_generator(): @@ -1268,7 +1807,7 @@ def test_chord_size_deserialized_element_single(self): with patch( "celery.canvas.Signature.from_dict", return_value=child_sig ) as mock_from_dict: - assert chord_sig. __length_hint__() == 1 + assert chord_sig.__length_hint__() == 1 mock_from_dict.assert_called_once_with(deserialized_child_sig) def test_chord_size_deserialized_element_many(self): @@ -1282,7 +1821,7 @@ def test_chord_size_deserialized_element_many(self): with patch( "celery.canvas.Signature.from_dict", return_value=child_sig ) as mock_from_dict: - assert chord_sig. __length_hint__() == 42 + assert chord_sig.__length_hint__() == 42 mock_from_dict.assert_has_calls([call(deserialized_child_sig)] * 42) def test_set_immutable(self): diff --git a/t/unit/tasks/test_chord.py b/t/unit/tasks/test_chord.py index af4fdee4627..11121d61c6f 100644 --- a/t/unit/tasks/test_chord.py +++ b/t/unit/tasks/test_chord.py @@ -4,6 +4,7 @@ import pytest from celery import canvas, group, result, uuid +from celery.canvas import Signature from celery.exceptions import ChordError, Retry from celery.result import AsyncResult, EagerResult, GroupResult @@ -12,6 +13,11 @@ def passthru(x): return x +class AnySignatureWithTask(Signature): + def __eq__(self, other): + return self.task == other.task + + class ChordCase: def setup(self): @@ -71,7 +77,7 @@ class AlwaysReady(TSR): with self._chord_context(AlwaysReady) as (cb, retry, _): cb.type.apply_async.assert_called_with( - ([2, 4, 8, 6],), {}, task_id=cb.id, + ([2, 4, 8, 6],), {}, task_id=cb.id, stamped_headers=['groups'], groups=[] ) # didn't retry assert not retry.call_count @@ -209,17 +215,27 @@ def test_unlock_join_timeout_default(self): def test_unlock_join_timeout_custom(self): self._test_unlock_join_timeout(timeout=5.0) - def test_unlock_with_chord_params(self): + def test_unlock_with_chord_params_default(self): @self.app.task(shared=False) def mul(x, y): return x * y from celery import chord - ch = chord(group(mul.s(1, 1), mul.s(2, 2)), mul.s(), interval=10) + g = group(mul.s(1, 1), mul.s(2, 2)) + body = mul.s() + ch = chord(g, body, interval=10) with patch.object(ch, 'run') as run: ch.apply_async() - run.assert_called_once_with(group(mul.s(1, 1), mul.s(2, 2)), mul.s(), (), task_id=None, interval=10) + run.assert_called_once_with( + AnySignatureWithTask(g), + mul.s(), + (), + task_id=None, + interval=10, + groups=[ch.tasks.id], + stamped_headers=['groups'] + ) def test_unlock_with_chord_params_and_task_id(self): @self.app.task(shared=False) @@ -227,16 +243,21 @@ def mul(x, y): return x * y from celery import chord - ch = chord(group(mul.s(1, 1), mul.s(2, 2)), mul.s(), interval=10) + g = group(mul.s(1, 1), mul.s(2, 2)) + body = mul.s() + ch = chord(g, body, interval=10) with patch.object(ch, 'run') as run: 
ch.apply_async(task_id=sentinel.task_id) + run.assert_called_once_with( - group(mul.s(1, 1), mul.s(2, 2)), + AnySignatureWithTask(g), mul.s(), (), task_id=sentinel.task_id, interval=10, + groups=[ch.tasks.id], + stamped_headers=['groups'] ) From 466e52cbbe00430cc4af2448ef793877389af8b8 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Wed, 29 Jun 2022 17:28:03 +0600 Subject: [PATCH 0149/1051] added changelog for v5.3.0a1 --- Changelog.rst | 49 ++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 48 insertions(+), 1 deletion(-) diff --git a/Changelog.rst b/Changelog.rst index a88ec2c16a1..2bb13cba8be 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -5,9 +5,56 @@ ================ This document contains change notes for bugfix & new features -in the & 5.2.x series, please see :ref:`whatsnew-5.2` for +in the master branch & 5.2.x series, please see :ref:`whatsnew-5.2` for an overview of what's new in Celery 5.2. +.. _version-5.3.0a1: + +5.3.0a1 +======= + +:release-date: 2022-06-29 5:15 P.M UTC+6:00 +:release-by: Asif Saif Uddin + +- Remove Python 3.4 compatibility code. +- call ping to set connection attr for avoiding redis parse_response error. +- Use importlib instead of deprecated pkg_resources. +- fix #7245 uid duplicated in command params. +- Fix subscribed_to maybe empty (#7232). +- Fix: Celery beat sleeps 300 seconds sometimes even when it should run a task within a few seconds (e.g. 13 seconds) #7290. +- Add security_key_password option (#7292). +- Limit elasticsearch support to below version 8.0. +- try new major release of pytest 7 (#7330). +- broker_connection_retry should no longer apply on startup (#7300). +- Remove __ne__ methods (#7257). +- fix #7200 uid and gid. +- Remove exception-throwing from the signal handler. +- Add mypy to the pipeline (#7383). +- Expose more debugging information when receiving unkown tasks. (#7405) +- Avoid importing buf_t from billiard's compat module as it was removed. +- Avoid negating a constant in a loop. (#7443) +- Ensure expiration is of float type when migrating tasks (#7385). +- load_extension_class_names - correct module_name (#7406) +- Bump pymongo[srv]>=4.0.2. +- Use inspect.getgeneratorstate in asynpool.gen_not_started (#7476). +- Fix test with missing .get() (#7479). +- azure-storage-blob>=12.11.0 +- Make start_worker, setup_default_app reusable outside of pytest. +- Ensure a proper error message is raised when id for key is empty (#7447). +- Crontab string representation does not match UNIX crontab expression. +- Worker should exit with ctx.exit to get the right exitcode for non-zero. +- Fix expiration check (#7552). +- Use callable built-in. +- Include dont_autoretry_for option in tasks. (#7556) +- fix: Syntax error in arango query. +- Fix custom headers propagation on task retries (#7555). +- Silence backend warning when eager results are stored. +- Reduce prefetch count on restart and gradually restore it (#7350). +- Improve workflow primitive subclassing (#7593). +- test kombu>=5.3.0a1,<6.0 (#7598). +- Canvas Header Stamping (#7384). + + .. 
_version-5.2.7: From 91935b9f245f85ca3331f53746c2599e1e260017 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Wed, 29 Jun 2022 17:33:42 +0600 Subject: [PATCH 0150/1051] =?UTF-8?q?Bump=20version:=205.2.7=20=E2=86=92?= =?UTF-8?q?=205.3.0a1?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.cfg | 2 +- README.rst | 6 +++--- celery/__init__.py | 2 +- docs/includes/introduction.txt | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 85e1bf24d8e..2dab5aece90 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 5.2.7 +current_version = 5.3.0a1 commit = True tag = True parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P[a-z\d]+)? diff --git a/README.rst b/README.rst index 18e1425985b..b05c381ed68 100644 --- a/README.rst +++ b/README.rst @@ -2,7 +2,7 @@ |build-status| |coverage| |license| |wheel| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge| -:Version: 5.2.7 (dawn-chorus) +:Version: 5.3.0a1 (dawn-chorus) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ @@ -57,7 +57,7 @@ in such a way that the client enqueues an URL to be requested by a worker. What do I need? =============== -Celery version 5.2.0 runs on, +Celery version 5.3.0a1 runs on, - Python (3.7, 3.8, 3.9, 3.10) - PyPy3.7 (7.3.7+) @@ -90,7 +90,7 @@ Get Started =========== If this is the first time you're trying to use Celery, or you're -new to Celery v5.2.0 coming from previous versions then you should read our +new to Celery v5.3.0a1 coming from previous versions then you should read our getting started tutorials: - `First steps with Celery`_ diff --git a/celery/__init__.py b/celery/__init__.py index 053e2eadd48..dbc137b4af8 100644 --- a/celery/__init__.py +++ b/celery/__init__.py @@ -17,7 +17,7 @@ SERIES = 'dawn-chorus' -__version__ = '5.2.7' +__version__ = '5.3.0a1' __author__ = 'Ask Solem' __contact__ = 'auvipy@gmail.com' __homepage__ = 'https://docs.celeryq.dev/' diff --git a/docs/includes/introduction.txt b/docs/includes/introduction.txt index b5f691a8e07..59c93380803 100644 --- a/docs/includes/introduction.txt +++ b/docs/includes/introduction.txt @@ -1,4 +1,4 @@ -:Version: 5.2.7 (dawn-chorus) +:Version: 5.3.0a1 (dawn-chorus) :Web: https://docs.celeryproject.org/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ From 62168fcd03ebc45425ff2b83504fafb029a1f9d1 Mon Sep 17 00:00:00 2001 From: kacky Date: Thu, 30 Jun 2022 10:40:06 +0900 Subject: [PATCH 0151/1051] Update task-rejected signature Actual task-rejected signature is 'requeue' https://github.com/celery/celery/blob/v5.2.7/celery/worker/request.py#L609 --- docs/userguide/monitoring.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/userguide/monitoring.rst b/docs/userguide/monitoring.rst index 9a55dccc5c7..15be2b83a2b 100644 --- a/docs/userguide/monitoring.rst +++ b/docs/userguide/monitoring.rst @@ -736,7 +736,7 @@ Sent if the execution of the task failed. task-rejected ~~~~~~~~~~~~~ -:signature: ``task-rejected(uuid, requeued)`` +:signature: ``task-rejected(uuid, requeue)`` The task was rejected by the worker, possibly to be re-queued or moved to a dead letter queue. 
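For reference, the corrected ``task-rejected(uuid, requeue)`` signature can be observed with a real-time event receiver. A minimal sketch, assuming a broker at the default URL and a worker started with task events enabled (``-E``); the handler name and printed text are illustrative:

.. code-block:: python

    from celery import Celery

    app = Celery(broker='amqp://guest@localhost//')

    def on_task_rejected(event):
        # The event field is ``requeue`` (not ``requeued``), matching the
        # corrected signature above.
        print(f"task {event['uuid']} rejected, requeue={event.get('requeue')}")

    with app.connection() as connection:
        receiver = app.events.Receiver(connection, handlers={
            'task-rejected': on_task_rejected,
        })
        receiver.capture(limit=None, timeout=None, wakeup=True)
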
From 5bec3ce56ad39ce41fd6c9d0a3da6efcb01222cd Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Thu, 30 Jun 2022 09:55:46 +0600 Subject: [PATCH 0152/1051] remove python 3 to 2 compat import --- docs/userguide/canvas.rst | 1 - 1 file changed, 1 deletion(-) diff --git a/docs/userguide/canvas.rst b/docs/userguide/canvas.rst index 2cb42254acd..740a27cfbcd 100644 --- a/docs/userguide/canvas.rst +++ b/docs/userguide/canvas.rst @@ -559,7 +559,6 @@ Here's an example errback: .. code-block:: python - from __future__ import print_function import os From ec3714edf37e773ca5372f71f7f4ee5b1b33dd5d Mon Sep 17 00:00:00 2001 From: Eric Yen Date: Tue, 28 Jun 2022 14:43:00 -0700 Subject: [PATCH 0153/1051] async chords should pass it's kwargs to the group/body --- celery/canvas.py | 6 +++--- t/integration/test_canvas.py | 30 ++++++++++++++++++++++++++++++ t/unit/tasks/test_chord.py | 2 ++ 3 files changed, 35 insertions(+), 3 deletions(-) diff --git a/celery/canvas.py b/celery/canvas.py index 4a32ae7fc5a..6207a73da41 100644 --- a/celery/canvas.py +++ b/celery/canvas.py @@ -1705,7 +1705,7 @@ def apply_async(self, args=None, kwargs=None, task_id=None, task_id = option_task_id # chord([A, B, ...], C) - return self.run(tasks, body, args, task_id=task_id, **merged_options) + return self.run(tasks, body, args, task_id=task_id, kwargs=kwargs, **merged_options) def apply(self, args=None, kwargs=None, propagate=True, body=None, **options): @@ -1755,7 +1755,7 @@ def __length_hint__(self): def run(self, header, body, partial_args, app=None, interval=None, countdown=1, max_retries=None, eager=False, - task_id=None, **options): + task_id=None, kwargs=None, **options): app = app or self._get_app(body) group_id = header.options.get('task_id') or uuid() root_id = body.options.get('root_id') @@ -1782,7 +1782,7 @@ def run(self, header, body, partial_args, app=None, interval=None, countdown=countdown, max_retries=max_retries, ) - header_result = header(*partial_args, task_id=group_id, **options) + header_result = header.apply_async(partial_args, kwargs, task_id=group_id, **options) # The execution of a chord body is normally triggered by its header's # tasks completing. If the header is empty this will never happen, so # we execute the body manually here. diff --git a/t/integration/test_canvas.py b/t/integration/test_canvas.py index 2d9c272ae3b..a88d14cba0b 100644 --- a/t/integration/test_canvas.py +++ b/t/integration/test_canvas.py @@ -1424,6 +1424,36 @@ def test_group_chain(self, manager): res = c() assert res.get(timeout=TIMEOUT) == [12, 13, 14, 15] + def test_group_kwargs(self, manager): + try: + manager.app.backend.ensure_chords_allowed() + except NotImplementedError as e: + raise pytest.skip(e.args[0]) + c = ( + add.s(2, 2) | + group(add.s(i) for i in range(4)) | + add_to_all.s(8) + ) + res = c.apply_async(kwargs={"z": 1}) + assert res.get(timeout=TIMEOUT) == [13, 14, 15, 16] + + def test_group_args_and_kwargs(self, manager): + try: + manager.app.backend.ensure_chords_allowed() + except NotImplementedError as e: + raise pytest.skip(e.args[0]) + c = ( + group(add.s(i) for i in range(4)) | + add_to_all.s(8) + ) + res = c.apply_async(args=(4,), kwargs={"z": 1}) + if manager.app.conf.result_backend.startswith('redis'): + # for a simple chord like the one above, redis does not guarantee + # the ordering of the results as a performance trade off. 
+ assert set(res.get(timeout=TIMEOUT)) == {13, 14, 15, 16} + else: + assert res.get(timeout=TIMEOUT) == [13, 14, 15, 16] + def test_nested_group_chain(self, manager): try: manager.app.backend.ensure_chords_allowed() diff --git a/t/unit/tasks/test_chord.py b/t/unit/tasks/test_chord.py index 11121d61c6f..c2aad5f894f 100644 --- a/t/unit/tasks/test_chord.py +++ b/t/unit/tasks/test_chord.py @@ -232,6 +232,7 @@ def mul(x, y): mul.s(), (), task_id=None, + kwargs={}, interval=10, groups=[ch.tasks.id], stamped_headers=['groups'] @@ -255,6 +256,7 @@ def mul(x, y): mul.s(), (), task_id=sentinel.task_id, + kwargs={}, interval=10, groups=[ch.tasks.id], stamped_headers=['groups'] From 515f98b7072677439423a15035541d24bfcb2348 Mon Sep 17 00:00:00 2001 From: Gabriel Soldani <1268700+gabrielsoldani@users.noreply.github.com> Date: Wed, 6 Jul 2022 13:21:59 -0300 Subject: [PATCH 0154/1051] beat: Suppress banner output with the quiet option (#7608) * beat: Suppress banner output with the quiet option This adds missing support for the quiet command line option (`--quiet` or `-q`) for the celery beat command, which suppresses banner and version information output. Fixes #5836. * beat: Add tests for the `--quiet` option --- celery/apps/beat.py | 12 ++++++++---- celery/bin/beat.py | 3 ++- t/unit/bin/proj/scheduler.py | 6 ++++++ t/unit/bin/test_beat.py | 34 ++++++++++++++++++++++++++++++++++ 4 files changed, 50 insertions(+), 5 deletions(-) create mode 100644 t/unit/bin/proj/scheduler.py create mode 100644 t/unit/bin/test_beat.py diff --git a/celery/apps/beat.py b/celery/apps/beat.py index 8652c62730a..dbed1ed442f 100644 --- a/celery/apps/beat.py +++ b/celery/apps/beat.py @@ -44,7 +44,8 @@ def __init__(self, max_interval=None, app=None, scheduler=None, scheduler_cls=None, # XXX use scheduler redirect_stdouts=None, - redirect_stdouts_level=None, **kwargs): + redirect_stdouts_level=None, + quiet=False, **kwargs): self.app = app = app or self.app either = self.app.either self.loglevel = loglevel @@ -56,6 +57,7 @@ def __init__(self, max_interval=None, app=None, 'worker_redirect_stdouts', redirect_stdouts) self.redirect_stdouts_level = either( 'worker_redirect_stdouts_level', redirect_stdouts_level) + self.quiet = quiet self.max_interval = max_interval self.socket_timeout = socket_timeout @@ -70,8 +72,9 @@ def __init__(self, max_interval=None, app=None, self.loglevel = LOG_LEVELS[self.loglevel.upper()] def run(self): - print(str(self.colored.cyan( - f'celery beat v{VERSION_BANNER} is starting.'))) + if not self.quiet: + print(str(self.colored.cyan( + f'celery beat v{VERSION_BANNER} is starting.'))) self.init_loader() self.set_process_title() self.start_scheduler() @@ -93,7 +96,8 @@ def start_scheduler(self): schedule_filename=self.schedule, ) - print(self.banner(service)) + if not self.quiet: + print(self.banner(service)) self.setup_logging() if self.socket_timeout: diff --git a/celery/bin/beat.py b/celery/bin/beat.py index 9fcdc760794..c8a8a499b51 100644 --- a/celery/bin/beat.py +++ b/celery/bin/beat.py @@ -62,7 +62,8 @@ def beat(ctx, detach=False, logfile=None, pidfile=None, uid=None, maybe_drop_privileges(uid=uid, gid=gid) beat = partial(app.Beat, - logfile=logfile, pidfile=pidfile, **kwargs) + logfile=logfile, pidfile=pidfile, + quiet=ctx.obj.quiet, **kwargs) if detach: with detached(logfile, pidfile, uid, gid, umask, workdir): diff --git a/t/unit/bin/proj/scheduler.py b/t/unit/bin/proj/scheduler.py new file mode 100644 index 00000000000..089b4e0eaf1 --- /dev/null +++ b/t/unit/bin/proj/scheduler.py @@ -0,0 +1,6 @@ 
+from celery.beat import Scheduler + + +class mScheduler(Scheduler): + def tick(self): + raise Exception diff --git a/t/unit/bin/test_beat.py b/t/unit/bin/test_beat.py new file mode 100644 index 00000000000..cd401ee7620 --- /dev/null +++ b/t/unit/bin/test_beat.py @@ -0,0 +1,34 @@ +import pytest +from click.testing import CliRunner + +from celery.app.log import Logging +from celery.bin.celery import celery + + +@pytest.fixture(scope='session') +def use_celery_app_trap(): + return False + + +def test_cli(isolated_cli_runner: CliRunner): + Logging._setup = True # To avoid hitting the logging sanity checks + res = isolated_cli_runner.invoke( + celery, + ["-A", "t.unit.bin.proj.app", "beat", "-S", "t.unit.bin.proj.scheduler.mScheduler"], + catch_exceptions=True + ) + assert res.exit_code == 1, (res, res.stdout) + assert res.stdout.startswith("celery beat") + assert "Configuration ->" in res.stdout + + +def test_cli_quiet(isolated_cli_runner: CliRunner): + Logging._setup = True # To avoid hitting the logging sanity checks + res = isolated_cli_runner.invoke( + celery, + ["-A", "t.unit.bin.proj.app", "--quiet", "beat", "-S", "t.unit.bin.proj.scheduler.mScheduler"], + catch_exceptions=True + ) + assert res.exit_code == 1, (res, res.stdout) + assert not res.stdout.startswith("celery beat") + assert "Configuration -> " not in res.stdout From 6f8c2dff4fcc4e46f3ef774d8f770656c23bd256 Mon Sep 17 00:00:00 2001 From: Klaas van Schelven Date: Thu, 7 Jul 2022 11:44:55 +0200 Subject: [PATCH 0155/1051] Fix honor Django's TIME_ZONE setting See #4006 --- celery/app/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/celery/app/utils.py b/celery/app/utils.py index c825045ade7..0dd3409d575 100644 --- a/celery/app/utils.py +++ b/celery/app/utils.py @@ -128,7 +128,7 @@ def task_default_routing_key(self): @property def timezone(self): # this way we also support django's time zone. - return self.first('timezone', 'time_zone') + return self.first('timezone', 'TIME_ZONE') def without_defaults(self): """Return the current configuration, but without defaults.""" From ce9ab38853d2e6d68884b91afb22117e785ae32f Mon Sep 17 00:00:00 2001 From: Charles-Axel Dein <120501+charlax@users.noreply.github.com> Date: Mon, 11 Jul 2022 10:09:15 +0200 Subject: [PATCH 0156/1051] Fix link to open source tripwire in docs --- docs/userguide/security.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/userguide/security.rst b/docs/userguide/security.rst index 48d7d991afb..f880573060b 100644 --- a/docs/userguide/security.rst +++ b/docs/userguide/security.rst @@ -251,7 +251,7 @@ that can be used. .. _`OSSEC`: http://www.ossec.net/ .. _`Samhain`: http://la-samhna.de/samhain/index.html .. _`AIDE`: http://aide.sourceforge.net/ -.. _`Open Source Tripwire`: http://sourceforge.net/projects/tripwire/ +.. _`Open Source Tripwire`: https://github.com/Tripwire/tripwire-open-source .. _`ZFS`: https://en.wikipedia.org/wiki/ZFS .. rubric:: Footnotes From aa9fd8a6c06e69c7eda2a59866c3d84622c85d20 Mon Sep 17 00:00:00 2001 From: Klaas van Schelven Date: Wed, 13 Jul 2022 10:40:03 +0200 Subject: [PATCH 0157/1051] Don't warn about DEBUG=True for Django This warning used to be correct, but is no longer relevant since Django 1.8. See https://github.com/django/django/commit/cfcca7ccce3dc527d16757ff6dc978e50c4a2e61 for the Django-side fix. 
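Projects that still want such a reminder can register one themselves. A minimal sketch using the public ``worker_ready`` signal; the warning text is illustrative and a configured Django settings module is assumed:

.. code-block:: python

    import warnings

    from celery.signals import worker_ready

    @worker_ready.connect
    def warn_if_debug(sender=None, **kwargs):
        # Runs once the worker is up; import settings lazily so the
        # handler is harmless outside a Django project.
        from django.conf import settings
        if settings.DEBUG:
            warnings.warn(
                'settings.DEBUG is enabled; make sure this is intended '
                'in production environments.')
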
--- celery/fixups/django.py | 6 ------ t/unit/fixups/test_django.py | 9 --------- 2 files changed, 15 deletions(-) diff --git a/celery/fixups/django.py b/celery/fixups/django.py index 59fcb9e26b8..05a41663b96 100644 --- a/celery/fixups/django.py +++ b/celery/fixups/django.py @@ -137,7 +137,6 @@ def validate_models(self) -> None: def install(self) -> "DjangoWorkerFixup": signals.beat_embedded_init.connect(self.close_database) - signals.worker_ready.connect(self.on_worker_ready) signals.task_prerun.connect(self.on_task_prerun) signals.task_postrun.connect(self.on_task_postrun) signals.worker_process_init.connect(self.on_worker_process_init) @@ -211,8 +210,3 @@ def close_cache(self) -> None: self._cache.close_caches() except (TypeError, AttributeError): pass - - def on_worker_ready(self, **kwargs: Any) -> None: - if self._settings.DEBUG: - warnings.warn('''Using settings.DEBUG leads to a memory - leak, never use this setting in production environments!''') diff --git a/t/unit/fixups/test_django.py b/t/unit/fixups/test_django.py index 8cdcc5c416d..3f13970e033 100644 --- a/t/unit/fixups/test_django.py +++ b/t/unit/fixups/test_django.py @@ -131,7 +131,6 @@ def test_install(self): sigs.beat_embedded_init.connect.assert_called_with( f.close_database, ) - sigs.worker_ready.connect.assert_called_with(f.on_worker_ready) sigs.task_prerun.connect.assert_called_with(f.on_task_prerun) sigs.task_postrun.connect.assert_called_with(f.on_task_postrun) sigs.worker_process_init.connect.assert_called_with( @@ -256,14 +255,6 @@ def test_close_cache(self): f.close_cache() f._cache.close_caches.assert_called_with() - def test_on_worker_ready(self): - with self.fixup_context(self.app) as (f, _, _): - f._settings.DEBUG = False - f.on_worker_ready() - with pytest.warns(UserWarning): - f._settings.DEBUG = True - f.on_worker_ready() - @pytest.mark.patched_module('django', 'django.db', 'django.core', 'django.core.cache', 'django.conf', 'django.db.utils') From b96ab282a8a2ea3d97d034f862e9fd6aceb0a0b5 Mon Sep 17 00:00:00 2001 From: "pyup.io bot" Date: Mon, 18 Jul 2022 21:33:08 -0500 Subject: [PATCH 0158/1051] Scheduled weekly dependency update for week 29 (#7638) * Update sphinx-click from 4.2.0 to 4.3.0 * Update pre-commit from 2.19.0 to 2.20.0 * Pin elasticsearch to latest version 8.3.1 * Update zstandard from 0.17.0 to 0.18.0 * Update pytest-github-actions-annotate-failures from 0.1.6 to 0.1.7 * Update pycurl from 7.43.0.5 to 7.45.1 * elasticsearch<8.0 * pycurl==7.43.0.5 Co-authored-by: Asif Saif Uddin --- requirements/docs.txt | 2 +- requirements/extras/zstd.txt | 2 +- requirements/test-ci-base.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/docs.txt b/requirements/docs.txt index f6e6432f103..cdb836b29cd 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -1,7 +1,7 @@ sphinx_celery~=2.0.0 Sphinx>=3.0.0 sphinx-testing~=1.0.1 -sphinx-click==4.2.0 +sphinx-click==4.3.0 -r extras/sqlalchemy.txt -r test.txt -r deps/mock.txt diff --git a/requirements/extras/zstd.txt b/requirements/extras/zstd.txt index 9f5bc8a143b..73def0e68be 100644 --- a/requirements/extras/zstd.txt +++ b/requirements/extras/zstd.txt @@ -1 +1 @@ -zstandard==0.17.0 +zstandard==0.18.0 diff --git a/requirements/test-ci-base.txt b/requirements/test-ci-base.txt index 23316a0aec1..efe082c33e5 100644 --- a/requirements/test-ci-base.txt +++ b/requirements/test-ci-base.txt @@ -1,5 +1,5 @@ pytest-cov==3.0.0 -pytest-github-actions-annotate-failures==0.1.6 
+pytest-github-actions-annotate-failures==0.1.7 codecov==2.1.12 -r extras/redis.txt -r extras/sqlalchemy.txt diff --git a/requirements/test.txt b/requirements/test.txt index 66109b1c1c1..d23cbf8270c 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -7,6 +7,6 @@ boto3>=1.9.178 moto>=2.2.6 # typing extensions mypy==0.961; platform_python_implementation=="CPython" -pre-commit==2.19.0 +pre-commit==2.20.0 -r extras/yaml.txt -r extras/msgpack.txt From 45b5c4a1d4c0c099fc4ccd13fc4c80e2ccedc088 Mon Sep 17 00:00:00 2001 From: 954 <510485871@qq.com> Date: Thu, 28 Jul 2022 12:52:51 +0800 Subject: [PATCH 0159/1051] Fixed the `on_after_finalize` cannot access `tasks` due to deadlock(#3589) --- celery/app/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/celery/app/base.py b/celery/app/base.py index c21e290ed74..6ca3eaf5ada 100644 --- a/celery/app/base.py +++ b/celery/app/base.py @@ -253,7 +253,7 @@ def __init__(self, main=None, loader=None, backend=None, self._pending_periodic_tasks = deque() self.finalized = False - self._finalize_mutex = threading.Lock() + self._finalize_mutex = threading.RLock() self._pending = deque() self._tasks = tasks if not isinstance(self._tasks, TaskRegistry): From 0d126ef25310ee15b57520955d6c0b45540bf434 Mon Sep 17 00:00:00 2001 From: Denys Pidlisnyi <93984934+denys-pidlisnyi@users.noreply.github.com> Date: Thu, 28 Jul 2022 15:58:22 +0300 Subject: [PATCH 0160/1051] Update tasks.rst --- docs/userguide/tasks.rst | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/docs/userguide/tasks.rst b/docs/userguide/tasks.rst index f41b53e61ec..16a73ec6e79 100644 --- a/docs/userguide/tasks.rst +++ b/docs/userguide/tasks.rst @@ -1934,11 +1934,14 @@ once all transactions have been committed successfully. .. code-block:: python - from django.db.transaction import on_commit + from django.db import transaction + from django.http import HttpResponseRedirect + @transaction.atomic def create_article(request): article = Article.objects.create() - on_commit(lambda: expand_abbreviations.delay(article.pk)) + transaction.on_commit(lambda: expand_abbreviations.delay(article.pk)) + return HttpResponseRedirect('/articles/') .. note:: ``on_commit`` is available in Django 1.9 and above, if you are using a From 114b65f638853d37d209f1e0a6d091a38c28cbe5 Mon Sep 17 00:00:00 2001 From: Oleg Hoefling Date: Sun, 31 Jul 2022 17:37:21 +0200 Subject: [PATCH 0161/1051] fix code block formatting error causing no rendering in docs Signed-off-by: Oleg Hoefling --- docs/userguide/canvas.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/userguide/canvas.rst b/docs/userguide/canvas.rst index 740a27cfbcd..5904ef98807 100644 --- a/docs/userguide/canvas.rst +++ b/docs/userguide/canvas.rst @@ -1201,6 +1201,7 @@ For example, the following example ``InGroupVisitor`` will label tasks that are in side of some group by lable ``in_group``. .. 
code-block:: python + class InGroupVisitor(StampingVisitor): def __init__(self): self.in_group = False From 8bfe805776b1fd1da9eff0b6b10a93297ecb7936 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Mon, 1 Aug 2022 13:54:42 +0600 Subject: [PATCH 0162/1051] kombu>=5.3.0b1,<6.0 --- requirements/default.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/default.txt b/requirements/default.txt index ef8bb368ea0..5a076c8ffad 100644 --- a/requirements/default.txt +++ b/requirements/default.txt @@ -1,6 +1,6 @@ pytz>=2021.3 billiard>=3.6.4.0,<5.0 -kombu>=5.3.0a1,<6.0 +kombu>=5.3.0b1,<6.0 vine>=5.0.0,<6.0 click>=8.1.2,<9.0 click-didyoumean>=0.3.0 From 588a5558646ae5e863f6e0f22bcec5e57e29f1f7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Joon=20Hwan=20=EA=B9=80=EC=A4=80=ED=99=98?= Date: Mon, 1 Aug 2022 17:52:18 +0900 Subject: [PATCH 0163/1051] update docs website link (#7660) * update docs site url * add author --- .github/workflows/post_release_to_hacker_news.yml | 2 +- CONTRIBUTING.rst | 12 ++++++------ CONTRIBUTORS.txt | 1 + README.rst | 2 +- celery/worker/consumer/consumer.py | 4 ++-- docs/conf.py | 2 +- docs/history/changelog-2.0.rst | 2 +- docs/history/changelog-2.2.rst | 2 +- docs/history/changelog-3.0.rst | 2 +- docs/history/changelog-3.1.rst | 2 +- docs/includes/introduction.txt | 8 ++++---- docs/includes/resources.txt | 2 +- docs/templates/readme.txt | 2 +- docs/userguide/testing.rst | 2 +- examples/django/README.rst | 2 +- examples/periodic-tasks/myapp.py | 2 +- extra/generic-init.d/celerybeat | 2 +- extra/generic-init.d/celeryd | 2 +- extra/release/sphinx2rst_config.py | 2 +- extra/systemd/celery.conf | 2 +- 20 files changed, 29 insertions(+), 28 deletions(-) diff --git a/.github/workflows/post_release_to_hacker_news.yml b/.github/workflows/post_release_to_hacker_news.yml index d81bfb22c43..dddbb3c52af 100644 --- a/.github/workflows/post_release_to_hacker_news.yml +++ b/.github/workflows/post_release_to_hacker_news.yml @@ -13,5 +13,5 @@ jobs: HN_USERNAME: ${{ secrets.HN_USERNAME }} HN_PASSWORD: ${{ secrets.HN_PASSWORD }} HN_TITLE_FORMAT_SPECIFIER: Celery v%s Released! - HN_URL_FORMAT_SPECIFIER: https://docs.celeryproject.org/en/v%s/changelog.html + HN_URL_FORMAT_SPECIFIER: https://docs.celeryq.dev/en/v%s/changelog.html HN_TEST_MODE: true diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 46424cf8571..52e5a690467 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -1277,7 +1277,7 @@ Packages :CI: https://travis-ci.org/#!/celery/celery :Windows-CI: https://ci.appveyor.com/project/ask/celery :PyPI: :pypi:`celery` -:docs: http://docs.celeryproject.org +:docs: https://docs.celeryq.dev ``kombu`` --------- @@ -1376,7 +1376,7 @@ Deprecated :git: https://github.com/celery/django-celery :PyPI: :pypi:`django-celery` -:docs: http://docs.celeryproject.org/en/latest/django +:docs: https://docs.celeryq.dev/en/latest/django - ``Flask-Celery`` @@ -1487,11 +1487,11 @@ following: .. _`mailing-list`: https://groups.google.com/group/celery-users -.. _`irc-channel`: http://docs.celeryproject.org/en/latest/getting-started/resources.html#irc +.. _`irc-channel`: https://docs.celeryq.dev/en/latest/getting-started/resources.html#irc -.. _`internals-guide`: http://docs.celeryproject.org/en/latest/internals/guide.html +.. _`internals-guide`: https://docs.celeryq.dev/en/latest/internals/guide.html -.. _`bundles`: http://docs.celeryproject.org/en/latest/getting-started/introduction.html#bundles +.. 
_`bundles`: https://docs.celeryq.dev/en/latest/getting-started/introduction.html#bundles -.. _`report an issue`: http://docs.celeryproject.org/en/latest/contributing.html#reporting-bugs +.. _`report an issue`: https://docs.celeryq.dev/en/latest/contributing.html#reporting-bugs diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 9eb5ec50180..4b99f190dbe 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -289,3 +289,4 @@ kronion, 2021/08/26 Gabor Boros, 2021/11/09 Tizian Seehaus, 2022/02/09 Oleh Romanovskyi, 2022/06/09 +JoonHwan Kim, 2022/08/01 diff --git a/README.rst b/README.rst index b05c381ed68..e6730ee3421 100644 --- a/README.rst +++ b/README.rst @@ -461,7 +461,7 @@ Be sure to also read the `Contributing to Celery`_ section in the documentation. .. _`Contributing to Celery`: - http://docs.celeryproject.org/en/master/contributing.html + https://docs.celeryq.dev/en/master/contributing.html |oc-contributors| diff --git a/celery/worker/consumer/consumer.py b/celery/worker/consumer/consumer.py index 98ead56139a..f1010cf9d35 100644 --- a/celery/worker/consumer/consumer.py +++ b/celery/worker/consumer/consumer.py @@ -75,7 +75,7 @@ Or maybe you're using relative imports? Please see -http://docs.celeryq.dev/en/latest/internals/protocol.html +https://docs.celeryq.dev/en/latest/internals/protocol.html for more information. The full contents of the message body was: @@ -95,7 +95,7 @@ Please ensure your message conforms to the task message protocol as described here: -http://docs.celeryq.dev/en/latest/internals/protocol.html +https://docs.celeryq.dev/en/latest/internals/protocol.html The full contents of the message body was: %s diff --git a/docs/conf.py b/docs/conf.py index f28a5c9c72b..1e906935e91 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -5,7 +5,7 @@ project='Celery', version_dev='6.0', version_stable='5.0', - canonical_url='http://docs.celeryproject.org', + canonical_url='https://docs.celeryq.dev', webdomain='celeryproject.org', github_project='celery/celery', author='Ask Solem & contributors', diff --git a/docs/history/changelog-2.0.rst b/docs/history/changelog-2.0.rst index 4d238776aec..93110a490fa 100644 --- a/docs/history/changelog-2.0.rst +++ b/docs/history/changelog-2.0.rst @@ -332,7 +332,7 @@ Documentation * New homepage design by Jan Henrik Helmers: http://celeryproject.org -* New Sphinx theme by Armin Ronacher: http://docs.celeryproject.org/ +* New Sphinx theme by Armin Ronacher: https://docs.celeryq.dev/ * Fixed "pending_xref" errors shown in the HTML rendering of the documentation. Apparently this was caused by new changes in Sphinx 1.0b2. diff --git a/docs/history/changelog-2.2.rst b/docs/history/changelog-2.2.rst index 33e70de46b8..435caf9a216 100644 --- a/docs/history/changelog-2.2.rst +++ b/docs/history/changelog-2.2.rst @@ -138,7 +138,7 @@ News ---- * Our documentation is now hosted by Read The Docs - (http://docs.celeryproject.org), and all links have been changed to point to + (https://docs.celeryq.dev), and all links have been changed to point to the new URL. * Logging: Now supports log rotation using external tools like `logrotate.d`_ diff --git a/docs/history/changelog-3.0.rst b/docs/history/changelog-3.0.rst index af54fbc3616..c5385d0e727 100644 --- a/docs/history/changelog-3.0.rst +++ b/docs/history/changelog-3.0.rst @@ -822,7 +822,7 @@ If you're looking for versions prior to 3.0.x you should go to :ref:`history`. - Development documentation has moved to Read The Docs. 
- The new URL is: http://docs.celeryproject.org/en/master + The new URL is: https://docs.celeryq.dev/en/master - New :setting:`CELERY_QUEUE_HA_POLICY` setting used to set the default HA policy for queues when using RabbitMQ. diff --git a/docs/history/changelog-3.1.rst b/docs/history/changelog-3.1.rst index f7c72c31370..d2b33866b45 100644 --- a/docs/history/changelog-3.1.rst +++ b/docs/history/changelog-3.1.rst @@ -53,7 +53,7 @@ new in Celery 3.1. messages from clients/workers running 4.0. .. _`new task message protocol`: - http://docs.celeryproject.org/en/master/internals/protocol.html#version-2 + https://docs.celeryq.dev/en/master/internals/protocol.html#version-2 - ``Task.send_events`` can now be set to disable sending of events for that task only. diff --git a/docs/includes/introduction.txt b/docs/includes/introduction.txt index 59c93380803..cde308394d1 100644 --- a/docs/includes/introduction.txt +++ b/docs/includes/introduction.txt @@ -1,5 +1,5 @@ :Version: 5.3.0a1 (dawn-chorus) -:Web: https://docs.celeryproject.org/en/stable/index.html +:Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ :Keywords: task, queue, job, async, rabbitmq, amqp, redis, @@ -80,10 +80,10 @@ getting started tutorials: A more complete overview, showing more features. .. _`First steps with Celery`: - http://docs.celeryproject.org/en/latest/getting-started/first-steps-with-celery.html + https://docs.celeryq.dev/en/latest/getting-started/first-steps-with-celery.html .. _`Next steps`: - http://docs.celeryproject.org/en/latest/getting-started/next-steps.html + https://docs.celeryq.dev/en/latest/getting-started/next-steps.html Celery is… ============= @@ -198,4 +198,4 @@ Documentation The `latest documentation`_ is hosted at Read The Docs, containing user guides, tutorials, and an API reference. -.. _`latest documentation`: http://docs.celeryproject.org/en/latest/ +.. _`latest documentation`: https://docs.celeryq.dev/en/latest/ diff --git a/docs/includes/resources.txt b/docs/includes/resources.txt index 07681a464d7..f2c1c539fb1 100644 --- a/docs/includes/resources.txt +++ b/docs/includes/resources.txt @@ -53,7 +53,7 @@ Be sure to also read the `Contributing to Celery`_ section in the documentation. .. _`Contributing to Celery`: - http://docs.celeryproject.org/en/master/contributing.html + https://docs.celeryq.dev/en/master/contributing.html .. _license: diff --git a/docs/templates/readme.txt b/docs/templates/readme.txt index fba5a12155d..b3bb98383b8 100644 --- a/docs/templates/readme.txt +++ b/docs/templates/readme.txt @@ -1,4 +1,4 @@ -.. image:: http://docs.celeryproject.org/en/latest/_images/celery-banner-small.png +.. image:: https://docs.celeryq.dev/en/latest/_images/celery-banner-small.png |build-status| |license| |wheel| |pyversion| |pyimp| diff --git a/docs/userguide/testing.rst b/docs/userguide/testing.rst index a938aec70ca..dcf9cdc35b2 100644 --- a/docs/userguide/testing.rst +++ b/docs/userguide/testing.rst @@ -47,7 +47,7 @@ Say we had a task like this: raise self.retry(exc=exc) -``Note``: A task being `bound `_ means the first +``Note``: A task being `bound `_ means the first argument to the task will always be the task instance (self). which means you do get a self argument as the first argument and can use the Task class methods and attributes. 
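To make the "bound" behavior referenced above concrete, a minimal sketch (the task body is illustrative):

.. code-block:: python

    from celery import Celery

    app = Celery()

    @app.task(bind=True)
    def dump_context(self, x, y):
        # ``bind=True`` makes ``self`` the task instance, so Task methods
        # and attributes such as ``self.request`` and ``self.retry()``
        # are available inside the task body.
        print(f'Task id: {self.request.id!r}')
        return x + y
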
diff --git a/examples/django/README.rst b/examples/django/README.rst index 80d7a13cadd..0bb8ef49315 100644 --- a/examples/django/README.rst +++ b/examples/django/README.rst @@ -33,7 +33,7 @@ Installing requirements The settings file assumes that ``rabbitmq-server`` is running on ``localhost`` using the default ports. More information here: -http://docs.celeryproject.org/en/latest/getting-started/brokers/rabbitmq.html +https://docs.celeryq.dev/en/latest/getting-started/brokers/rabbitmq.html In addition, some Python requirements must also be satisfied: diff --git a/examples/periodic-tasks/myapp.py b/examples/periodic-tasks/myapp.py index b2e4f0b8045..c30e467010c 100644 --- a/examples/periodic-tasks/myapp.py +++ b/examples/periodic-tasks/myapp.py @@ -53,7 +53,7 @@ def setup_periodic_tasks(sender, **kwargs): sender.add_periodic_task(10.0, say.s('hello'), name='add every 10') # See periodic tasks user guide for more examples: - # http://docs.celeryproject.org/en/latest/userguide/periodic-tasks.html + # https://docs.celeryq.dev/en/latest/userguide/periodic-tasks.html if __name__ == '__main__': diff --git a/extra/generic-init.d/celerybeat b/extra/generic-init.d/celerybeat index c875e33e27d..8007a2d1325 100755 --- a/extra/generic-init.d/celerybeat +++ b/extra/generic-init.d/celerybeat @@ -6,7 +6,7 @@ # :Usage: /etc/init.d/celerybeat {start|stop|force-reload|restart|try-restart|status} # :Configuration file: /etc/default/celerybeat or /etc/default/celeryd # -# See http://docs.celeryproject.org/en/latest/userguide/daemonizing.html#generic-init-scripts +# See https://docs.celeryq.dev/en/latest/userguide/daemonizing.html#generic-init-scripts ### BEGIN INIT INFO # Provides: celerybeat diff --git a/extra/generic-init.d/celeryd b/extra/generic-init.d/celeryd index b928eebeb70..b2c05d56ba0 100755 --- a/extra/generic-init.d/celeryd +++ b/extra/generic-init.d/celeryd @@ -6,7 +6,7 @@ # :Usage: /etc/init.d/celeryd {start|stop|force-reload|restart|try-restart|status} # :Configuration file: /etc/default/celeryd (or /usr/local/etc/celeryd on BSD) # -# See http://docs.celeryproject.org/en/latest/userguide/daemonizing.html#generic-init-scripts +# See https://docs.celeryq.dev/en/latest/userguide/daemonizing.html#generic-init-scripts ### BEGIN INIT INFO diff --git a/extra/release/sphinx2rst_config.py b/extra/release/sphinx2rst_config.py index 2ab10310865..21fc59b1978 100644 --- a/extra/release/sphinx2rst_config.py +++ b/extra/release/sphinx2rst_config.py @@ -1,4 +1,4 @@ -REFBASE = 'http://docs.celeryproject.org/en/latest' +REFBASE = 'https://docs.celeryq.dev/en/latest' REFS = { 'mailing-list': 'https://groups.google.com/group/celery-users', diff --git a/extra/systemd/celery.conf b/extra/systemd/celery.conf index 8997c3d4576..14d95df4b02 100644 --- a/extra/systemd/celery.conf +++ b/extra/systemd/celery.conf @@ -1,5 +1,5 @@ # See -# http://docs.celeryproject.org/en/latest/userguide/daemonizing.html#usage-systemd +# https://docs.celeryq.dev/en/latest/userguide/daemonizing.html#usage-systemd CELERY_APP="proj" CELERYD_NODES="worker" From b35c1afd8f845b48b24ada6b0f378aa0076c99e6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Oleg=20H=C3=B6fling?= Date: Mon, 1 Aug 2022 11:14:05 +0200 Subject: [PATCH 0164/1051] fix doc rendering issues, part I (#7656) * fix wrong position of ref label for database backend settings section Signed-off-by: Oleg Hoefling * use plaintext renderer to highlight url code block Signed-off-by: Oleg Hoefling * match title underlines to text length Signed-off-by: Oleg Hoefling * fix bullet list formatting in 
changelog-5.0 Signed-off-by: Oleg Hoefling * add celery.bin.amqp module reference Signed-off-by: Oleg Hoefling * fix setting ref syntax Signed-off-by: Oleg Hoefling * fix broken document refs in whatsnew-5.1 Signed-off-by: Oleg Hoefling * disable refs to nonexistent cli options Signed-off-by: Oleg Hoefling * change duplicate ref label name of changelog-5.1 Signed-off-by: Oleg Hoefling * fix wrong setting role value in whatsnew-5.2 Signed-off-by: Oleg Hoefling * fix broken refs in workers Signed-off-by: Oleg Hoefling --- docs/getting-started/next-steps.rst | 2 +- docs/history/changelog-2.2.rst | 9 +++------ docs/history/changelog-2.3.rst | 5 ++--- docs/history/changelog-2.4.rst | 4 ++-- docs/history/changelog-3.1.rst | 2 +- docs/history/changelog-5.0.rst | 2 +- docs/history/changelog-5.1.rst | 2 +- docs/history/whatsnew-5.1.rst | 12 ++++++------ docs/reference/celery.bin.amqp.rst | 11 +++++++++++ docs/userguide/configuration.rst | 15 +++++++-------- docs/userguide/monitoring.rst | 2 +- docs/userguide/workers.rst | 4 ++-- docs/whatsnew-5.2.rst | 8 ++++---- 13 files changed, 42 insertions(+), 36 deletions(-) create mode 100644 docs/reference/celery.bin.amqp.rst diff --git a/docs/getting-started/next-steps.rst b/docs/getting-started/next-steps.rst index d919d0e57c5..286ff41261a 100644 --- a/docs/getting-started/next-steps.rst +++ b/docs/getting-started/next-steps.rst @@ -127,7 +127,7 @@ and prioritization, all described in the :ref:`Routing Guide `. You can get a complete list of command-line arguments -by passing in the :option:`--help ` flag: +by passing in the :option:`!--help` flag: .. code-block:: console diff --git a/docs/history/changelog-2.2.rst b/docs/history/changelog-2.2.rst index 435caf9a216..4b5d28233f2 100644 --- a/docs/history/changelog-2.2.rst +++ b/docs/history/changelog-2.2.rst @@ -20,8 +20,8 @@ Security Fixes -------------- * [Security: `CELERYSA-0001`_] Daemons would set effective id's rather than - real id's when the :option:`--uid `/ - :option:`--gid ` arguments to :program:`celery multi`, + real id's when the :option:`!--uid`/ + :option:`!--gid` arguments to :program:`celery multi`, :program:`celeryd_detach`, :program:`celery beat` and :program:`celery events` were used. @@ -47,7 +47,7 @@ Security Fixes * Redis result backend now works with Redis 2.4.4. -* multi: The :option:`--gid ` option now works correctly. +* multi: The :option:`!--gid` option now works correctly. * worker: Retry wrongfully used the repr of the traceback instead of the string representation. @@ -1026,6 +1026,3 @@ Experimental def my_view(request): with pool.acquire() as publisher: add.apply_async((2, 2), publisher=publisher, retry=True) - - - diff --git a/docs/history/changelog-2.3.rst b/docs/history/changelog-2.3.rst index 67bbb64dd49..cac7c1a7e78 100644 --- a/docs/history/changelog-2.3.rst +++ b/docs/history/changelog-2.3.rst @@ -20,8 +20,8 @@ Security Fixes -------------- * [Security: `CELERYSA-0001`_] Daemons would set effective id's rather than - real id's when the :option:`--uid `/ - :option:`--gid ` arguments to :program:`celery multi`, + real id's when the :option:`!--uid`/ + :option:`!--gid` arguments to :program:`celery multi`, :program:`celeryd_detach`, :program:`celery beat` and :program:`celery events` were used. @@ -368,4 +368,3 @@ Fixes * Remote control command ``add_consumer`` now does nothing if the queue is already being consumed from. 
- diff --git a/docs/history/changelog-2.4.rst b/docs/history/changelog-2.4.rst index 93745de2235..82073e176af 100644 --- a/docs/history/changelog-2.4.rst +++ b/docs/history/changelog-2.4.rst @@ -37,8 +37,8 @@ Security Fixes -------------- * [Security: `CELERYSA-0001`_] Daemons would set effective id's rather than - real id's when the :option:`--uid `/ - :option:`--gid ` arguments to + real id's when the :option:`!--uid`/ + :option:`!--gid` arguments to :program:`celery multi`, :program:`celeryd_detach`, :program:`celery beat` and :program:`celery events` were used. diff --git a/docs/history/changelog-3.1.rst b/docs/history/changelog-3.1.rst index d2b33866b45..4bb58c4f5a4 100644 --- a/docs/history/changelog-3.1.rst +++ b/docs/history/changelog-3.1.rst @@ -638,7 +638,7 @@ new in Celery 3.1. - **Django**: Compatibility with Django 1.7 on Windows (Issue #2126). -- **Programs**: :option:`--umask ` argument can now be +- **Programs**: :option:`!--umask` argument can now be specified in both octal (if starting with 0) or decimal. diff --git a/docs/history/changelog-5.0.rst b/docs/history/changelog-5.0.rst index 78832a373dc..13daf51fa03 100644 --- a/docs/history/changelog-5.0.rst +++ b/docs/history/changelog-5.0.rst @@ -20,7 +20,7 @@ an overview of what's new in Celery 5.0. - --quiet flag now actually makes celery avoid producing logs (#6599). - pass_context for handle_preload_options decorator (#6583). - Fix --pool=threads support in command line options parsing (#6787). -Fix the behavior of our json serialization which regressed in 5.0 (#6561). +- Fix the behavior of our json serialization which regressed in 5.0 (#6561). - celery -A app events -c camera now works as expected (#6774). .. _version-5.0.5: diff --git a/docs/history/changelog-5.1.rst b/docs/history/changelog-5.1.rst index 5b724b1536d..4a6cc5dc5ee 100644 --- a/docs/history/changelog-5.1.rst +++ b/docs/history/changelog-5.1.rst @@ -1,4 +1,4 @@ -.. _changelog: +.. _changelog-5.1: ================ Change history diff --git a/docs/history/whatsnew-5.1.rst b/docs/history/whatsnew-5.1.rst index a1c7416cdda..237b9722ba6 100644 --- a/docs/history/whatsnew-5.1.rst +++ b/docs/history/whatsnew-5.1.rst @@ -290,10 +290,10 @@ you should import `kombu.utils.encoding` instead. If you were using the `celery.task` module before, you should import directly from the `celery` module instead. -If you were using `from celery.task import Task` you should use +If you were using `from celery.task import Task` you should use `from celery import Task` instead. -If you were using the `celery.task` decorator you should use +If you were using the `celery.task` decorator you should use `celery.shared_task` instead. @@ -330,7 +330,7 @@ Support for Redis username authentication Previously, the username was ignored from the URI. Starting from Redis>=6.0, that shouldn't be the case since ACL support has landed. -Please refer to the :ref:`documentation <_conf-redis-result-backend>` for details. +Please refer to the :ref:`documentation ` for details. SQS transport - support back off policy ---------------------------------------- @@ -339,7 +339,7 @@ SQS now supports managed visibility timeout. This lets us implement a back off policy (for instance, an exponential policy) which means that the time between task failures will dynamically change based on the number of retries. 
-Documentation: :doc:`reference/kombu.transport.SQS.rst` +Documentation: :doc:`kombu:reference/kombu.transport.SQS` Duplicate successful tasks --------------------------- @@ -393,7 +393,7 @@ SQS - support STS authentication with AWS The STS token requires a refresh after a certain period of time. After `sts_token_timeout` is reached, a new token will be created. -Documentation: :doc:`getting-started/backends-and-brokers/sqs.rst` +Documentation: :doc:`/getting-started/backends-and-brokers/sqs` Support Redis `health_check_interval` ------------------------------------- @@ -416,4 +416,4 @@ Support Redis Sentinel with SSL ------------------------------- See documentation for more info: -:doc:`getting-started/backends-and-brokers/redis.rst` +:doc:`/getting-started/backends-and-brokers/redis` diff --git a/docs/reference/celery.bin.amqp.rst b/docs/reference/celery.bin.amqp.rst new file mode 100644 index 00000000000..13a9c0e2d7b --- /dev/null +++ b/docs/reference/celery.bin.amqp.rst @@ -0,0 +1,11 @@ +==================== + ``celery.bin.amqp`` +==================== + +.. contents:: + :local: +.. currentmodule:: celery.bin.amqp + +.. automodule:: celery.bin.amqp + :members: + :undoc-members: diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst index 81481aa3c88..ebe9c968664 100644 --- a/docs/userguide/configuration.rst +++ b/docs/userguide/configuration.rst @@ -596,7 +596,7 @@ This value is used for tasks that doesn't have a custom rate limit .. seealso:: - The setting:`worker_disable_rate_limits` setting can + The :setting:`worker_disable_rate_limits` setting can disable all rate limits. .. _conf-result-backend: @@ -854,9 +854,6 @@ Default: 1.0. Default interval for retrying chord tasks. -.. _conf-database-result-backend: - - .. setting:: override_backends ``override_backends`` @@ -876,7 +873,7 @@ Example: override_backends = {"db": "custom_module.backend.class"} - +.. _conf-database-result-backend: Database backend settings ------------------------- @@ -1681,7 +1678,7 @@ The name for the storage container in which to store the results. .. setting:: azureblockblob_base_path ``azureblockblob_base_path`` -~~~~~~~~~~~~~~~~~~~ +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. versionadded:: 5.1 @@ -1729,7 +1726,7 @@ Timeout in seconds for establishing the azure block blob connection. .. setting:: azureblockblob_read_timeout ``azureblockblob_read_timeout`` -~~~~~~~~~~~~~~~~~~~~ +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Default: 120. @@ -2182,7 +2179,9 @@ or:: The backend will store results in the K/V store of Consul as individual keys. The backend supports auto expire of results using TTLs in -Consul. The full syntax of the URL is:: +Consul. The full syntax of the URL is: + +.. code-block:: text consul://host:port[?one_client=1] diff --git a/docs/userguide/monitoring.rst b/docs/userguide/monitoring.rst index 15be2b83a2b..c65e8413aa6 100644 --- a/docs/userguide/monitoring.rst +++ b/docs/userguide/monitoring.rst @@ -353,7 +353,7 @@ and it includes a tool to dump events to :file:`stdout`: $ celery -A proj events --dump -For a complete list of options use :option:`--help `: +For a complete list of options use :option:`!--help`: .. code-block:: console diff --git a/docs/userguide/workers.rst b/docs/userguide/workers.rst index 9b8c2a4387d..f6524752c42 100644 --- a/docs/userguide/workers.rst +++ b/docs/userguide/workers.rst @@ -624,7 +624,7 @@ which needs two numbers: the maximum and minimum number of pool processes: 10 if necessary). 
You can also define your own rules for the autoscaler by subclassing -:class:`~celery.worker.autoscaler.Autoscaler`. +:class:`~celery.worker.autoscale.Autoscaler`. Some ideas for metrics include load average or the amount of memory available. You can specify a custom autoscaler with the :setting:`worker_autoscaler` setting. @@ -970,7 +970,7 @@ There are two types of remote control commands: Remote control commands are registered in the control panel and they take a single argument: the current -:class:`~celery.worker.control.ControlDispatch` instance. +:class:`!celery.worker.control.ControlDispatch` instance. From there you have access to the active :class:`~celery.worker.consumer.Consumer` if needed. diff --git a/docs/whatsnew-5.2.rst b/docs/whatsnew-5.2.rst index 1180a653c63..3e2a8700a64 100644 --- a/docs/whatsnew-5.2.rst +++ b/docs/whatsnew-5.2.rst @@ -330,10 +330,10 @@ older `azure-servicebus` versions. .. _v520-news: -Bug: Pymongo 3.12.1 is not compatible with Celery 5.2 -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Bug: Pymongo 3.12.1 is not compatible with Celery 5.2 +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -For now we are limiting Pymongo version, only allowing for versions between 3.3.0 and 3.12.0. +For now we are limiting Pymongo version, only allowing for versions between 3.3.0 and 3.12.0. This will be fixed in the next patch. @@ -390,4 +390,4 @@ You can now check the validity of the CA certificate while making a TLS connection to ArangoDB result backend. If you'd like to do so, set the ``verify`` key in the -:setting:`arangodb_backend_settings`` dictionary to ``True``. +:setting:`arangodb_backend_settings` dictionary to ``True``. From d29610bac81a1689b53440e6347b9c5ced038751 Mon Sep 17 00:00:00 2001 From: Gabriel Soldani <1268700+gabrielsoldani@users.noreply.github.com> Date: Mon, 1 Aug 2022 07:00:39 -0300 Subject: [PATCH 0165/1051] Make default worker state limits configurable (#7609) * Make default worker state limits configurable Previously, `REVOKES_MAX`, `REVOKE_EXPIRES`, `SUCCESSFUL_MAX` and `SUCCESSFUL_EXPIRES` were hardcoded in `celery.worker.state`. This patch introduces `CELERY_WORKER_` prefixed environment variables with the same names that allow you to customize these values should you need to. Fixes #3576. * Add tests for configurable worker state limits --- celery/worker/state.py | 8 ++++---- docs/userguide/workers.rst | 14 ++++++++++++++ t/unit/worker/test_state.py | 32 ++++++++++++++++++++++++++++++++ 3 files changed, 50 insertions(+), 4 deletions(-) diff --git a/celery/worker/state.py b/celery/worker/state.py index 3afb2e8e3b9..97f49150286 100644 --- a/celery/worker/state.py +++ b/celery/worker/state.py @@ -32,18 +32,18 @@ } #: maximum number of revokes to keep in memory. -REVOKES_MAX = 50000 +REVOKES_MAX = int(os.environ.get('CELERY_WORKER_REVOKES_MAX', 50000)) #: maximum number of successful tasks to keep in memory. -SUCCESSFUL_MAX = 1000 +SUCCESSFUL_MAX = int(os.environ.get('CELERY_WORKER_SUCCESSFUL_MAX', 1000)) #: how many seconds a revoke will be active before #: being expired when the max limit has been exceeded. -REVOKE_EXPIRES = 10800 +REVOKE_EXPIRES = float(os.environ.get('CELERY_WORKER_REVOKE_EXPIRES', 10800)) #: how many seconds a successful task will be cached in memory #: before being expired when the max limit has been exceeded. -SUCCESSFUL_EXPIRES = 10800 +SUCCESSFUL_EXPIRES = float(os.environ.get('CELERY_WORKER_SUCCESSFUL_EXPIRES', 10800)) #: Mapping of reserved task_id->Request. 
requests = {} diff --git a/docs/userguide/workers.rst b/docs/userguide/workers.rst index f6524752c42..03ac8a9aa5e 100644 --- a/docs/userguide/workers.rst +++ b/docs/userguide/workers.rst @@ -358,6 +358,20 @@ Commands All worker nodes keeps a memory of revoked task ids, either in-memory or persistent on disk (see :ref:`worker-persistent-revokes`). +.. note:: + + The maximum number of revoked tasks to keep in memory can be + specified using the ``CELERY_WORKER_REVOKES_MAX`` environment + variable, which defaults to 50000. When the limit has been exceeded, + the revokes will be active for 10800 seconds (3 hours) before being + expired. This value can be changed using the + ``CELERY_WORKER_REVOKE_EXPIRES`` environment variable. + + Memory limits can also be set for successful tasks through the + ``CELERY_WORKER_SUCCESSFUL_MAX`` and + ``CELERY_WORKER_SUCCESSFUL_EXPIRES`` environment variables, and + default to 1000 and 10800 respectively. + When a worker receives a revoke request it will skip executing the task, but it won't terminate an already executing task unless the `terminate` option is set. diff --git a/t/unit/worker/test_state.py b/t/unit/worker/test_state.py index 571fc4be32d..7388c49bb9f 100644 --- a/t/unit/worker/test_state.py +++ b/t/unit/worker/test_state.py @@ -1,4 +1,7 @@ +import os import pickle +import sys +from importlib import import_module from time import time from unittest.mock import Mock, patch @@ -187,3 +190,32 @@ def test_ready(self, requests=[SimpleReq('foo'), for request in requests: state.task_ready(request) assert len(state.active_requests) == 0 + + +class test_state_configuration(): + + @staticmethod + def import_state(): + with patch.dict(sys.modules): + del sys.modules['celery.worker.state'] + return import_module('celery.worker.state') + + @patch.dict(os.environ, { + 'CELERY_WORKER_REVOKES_MAX': '50001', + 'CELERY_WORKER_SUCCESSFUL_MAX': '1001', + 'CELERY_WORKER_REVOKE_EXPIRES': '10801', + 'CELERY_WORKER_SUCCESSFUL_EXPIRES': '10801', + }) + def test_custom_configuration(self): + state = self.import_state() + assert state.REVOKES_MAX == 50001 + assert state.SUCCESSFUL_MAX == 1001 + assert state.REVOKE_EXPIRES == 10801 + assert state.SUCCESSFUL_EXPIRES == 10801 + + def test_default_configuration(self): + state = self.import_state() + assert state.REVOKES_MAX == 50000 + assert state.SUCCESSFUL_MAX == 1000 + assert state.REVOKE_EXPIRES == 10800 + assert state.SUCCESSFUL_EXPIRES == 10800 From 0aeac3da61e624cba755bec0576de72893766c6f Mon Sep 17 00:00:00 2001 From: Omer Katz Date: Mon, 31 Jan 2022 16:55:30 +0200 Subject: [PATCH 0166/1051] Only clear the cache if there are no active writers. In #6863 we discarded all jobs if synack isn't enabled for the pool. This fixed a severe memory leak which occurs on connection restart. Instead of going over each job and checking if we should discard it, we should clear the entire cache when there are no active writers. If there are active writers, we should discard the jobs from the cache after we're done writing them since they also may remain on the cache forever. --- celery/concurrency/asynpool.py | 64 +++++++++++++++++++--------------- 1 file changed, 35 insertions(+), 29 deletions(-) diff --git a/celery/concurrency/asynpool.py b/celery/concurrency/asynpool.py index 28a1e09b80c..b8087ad3e3c 100644 --- a/celery/concurrency/asynpool.py +++ b/celery/concurrency/asynpool.py @@ -987,12 +987,10 @@ def flush(self): return # cancel all tasks that haven't been accepted so that NACK is sent # if synack is enabled. 
-        for job in tuple(self._cache.values()):
-            if not job._accepted:
-                if self.synack:
+        if self.synack:
+            for job in self._cache.values():
+                if not job._accepted:
                     job._cancel()
-                else:
-                    job.discard()
 
         # clear the outgoing buffer as the tasks will be redelivered by
         # the broker anyway.
@@ -1008,37 +1006,45 @@ def flush(self):
         if self._state == RUN:
             # flush outgoing buffers
             intervals = fxrange(0.01, 0.1, 0.01, repeatlast=True)
+
+            # TODO: Rewrite this as a dictionary comprehension once we drop support for Python 3.7
+            # This dict comprehension requires the walrus operator which is only available in 3.8.
             owned_by = {}
             for job in self._cache.values():
                 writer = _get_job_writer(job)
                 if writer is not None:
                     owned_by[writer] = job
 
-            while self._active_writers:
-                writers = list(self._active_writers)
-                for gen in writers:
-                    if (gen.__name__ == '_write_job' and
-                            gen_not_started(gen)):
-                        # hasn't started writing the job so can
-                        # discard the task, but we must also remove
-                        # it from the Pool._cache.
-                        try:
-                            job = owned_by[gen]
-                        except KeyError:
-                            pass
-                        else:
-                            # removes from Pool._cache
-                            job.discard()
-                        self._active_writers.discard(gen)
-                    else:
-                        try:
-                            job = owned_by[gen]
-                        except KeyError:
-                            pass
+            if not self._active_writers:
+                self._cache.clear()
+            else:
+                while self._active_writers:
+                    writers = list(self._active_writers)
+                    for gen in writers:
+                        if (gen.__name__ == '_write_job' and
+                                gen_not_started(gen)):
+                            # hasn't started writing the job so can
+                            # discard the task, but we must also remove
+                            # it from the Pool._cache.
+                            try:
+                                job = owned_by[gen]
+                            except KeyError:
+                                pass
+                            else:
+                                # removes from Pool._cache
+                                job.discard()
+                            self._active_writers.discard(gen)
                         else:
-                            job_proc = job._write_to
-                            if job_proc._is_alive():
-                                self._flush_writer(job_proc, gen)
+                            try:
+                                job = owned_by[gen]
+                            except KeyError:
+                                pass
+                            else:
+                                job_proc = job._write_to
+                                if job_proc._is_alive():
+                                    self._flush_writer(job_proc, gen)
+
+                                job.discard()
             # workers may have exited in the meantime.
             self.maintain_pool()
             sleep(next(intervals))  # don't busyloop
From ed069b9f857630032efcff5fcc1333cea4280170 Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Mon, 1 Aug 2022 17:13:33 +0600
Subject: [PATCH 0167/1051] changelog for v5.3.0b1

---
 Changelog.rst | 20 ++++++++++++++++++++
 1 file changed, 20 insertions(+)

diff --git a/Changelog.rst b/Changelog.rst
index 2bb13cba8be..21cdff3978b 100644
--- a/Changelog.rst
+++ b/Changelog.rst
@@ -8,6 +8,26 @@ This document contains change notes for bugfix & new features
 in the master branch & 5.2.x series, please see
 :ref:`whatsnew-5.2` for
 an overview of what's new in Celery 5.2.
 
+.. _version-5.3.0b1:
+
+5.3.0b1
+=======
+
+:release-date: 2022-08-01 5:15 P.M UTC+6:00
+:release-by: Asif Saif Uddin
+
+- Canvas Header Stamping (#7384).
+- Async chords should pass their kwargs to the group/body.
+- beat: Suppress banner output with the quiet option (#7608).
+- Fix honor Django's TIME_ZONE setting.
+- Don't warn about DEBUG=True for Django.
+- Fixed an issue where on_after_finalize could not access tasks due to a deadlock.
+- Bump kombu>=5.3.0b1,<6.0.
+- Make default worker state limits configurable (#7609).
+- Only clear the cache if there are no active writers.
+- Billiard 4.0.1
+
+
.. 
_version-5.3.0a1: 5.3.0a1 From feaad3f9fdf98d0453a07a68e307e48c6c3c2550 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Mon, 1 Aug 2022 17:16:37 +0600 Subject: [PATCH 0168/1051] =?UTF-8?q?Bump=20version:=205.3.0a1=20=E2=86=92?= =?UTF-8?q?=205.3.0b1?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.cfg | 2 +- README.rst | 2 +- celery/__init__.py | 2 +- docs/includes/introduction.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 2dab5aece90..02c8c493039 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 5.3.0a1 +current_version = 5.3.0b1 commit = True tag = True parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P[a-z\d]+)? diff --git a/README.rst b/README.rst index e6730ee3421..33ddcf75c7c 100644 --- a/README.rst +++ b/README.rst @@ -2,7 +2,7 @@ |build-status| |coverage| |license| |wheel| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge| -:Version: 5.3.0a1 (dawn-chorus) +:Version: 5.3.0b1 (dawn-chorus) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ diff --git a/celery/__init__.py b/celery/__init__.py index dbc137b4af8..7c2de763898 100644 --- a/celery/__init__.py +++ b/celery/__init__.py @@ -17,7 +17,7 @@ SERIES = 'dawn-chorus' -__version__ = '5.3.0a1' +__version__ = '5.3.0b1' __author__ = 'Ask Solem' __contact__ = 'auvipy@gmail.com' __homepage__ = 'https://docs.celeryq.dev/' diff --git a/docs/includes/introduction.txt b/docs/includes/introduction.txt index cde308394d1..cc2017543d6 100644 --- a/docs/includes/introduction.txt +++ b/docs/includes/introduction.txt @@ -1,4 +1,4 @@ -:Version: 5.3.0a1 (dawn-chorus) +:Version: 5.3.0b1 (dawn-chorus) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ From c3c6594b4cdea898abba218f576a669700dba98d Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 2 Aug 2022 19:42:11 +0300 Subject: [PATCH 0169/1051] BLM-2: Adding unit tests to chord clone (#7668) * Added .python-version and .vscode to .gitignore * Added test_chord_clone_kwargs() to verify chord cloning treats kwargs correctly * Happify linter --- .gitignore | 2 ++ t/unit/tasks/test_canvas.py | 17 +++++++++++++++++ 2 files changed, 19 insertions(+) diff --git a/.gitignore b/.gitignore index 0a51be7b118..4f206fdb28c 100644 --- a/.gitignore +++ b/.gitignore @@ -31,3 +31,5 @@ htmlcov/ coverage.xml test.db pip-wheel-metadata/ +.python-version +.vscode/ \ No newline at end of file diff --git a/t/unit/tasks/test_canvas.py b/t/unit/tasks/test_canvas.py index f7b5f7cac9f..677cb190b3d 100644 --- a/t/unit/tasks/test_canvas.py +++ b/t/unit/tasks/test_canvas.py @@ -2025,6 +2025,23 @@ def test_from_dict_deep_deserialize_chain(self, subtests): ): assert isinstance(deserialized_chord.body, _chain) + def test_chord_clone_kwargs(self, subtests): + """ Test that chord clone ensures the kwargs are the same """ + + with subtests.test(msg='Verify chord cloning clones kwargs correctly'): + c = chord([signature('g'), signature('h')], signature('i'), kwargs={'U': 6}) + c2 = c.clone() + assert c2.kwargs == c.kwargs + + with subtests.test(msg='Cloning the chord with overridden kwargs'): + override_kw = {'X': 2} + c3 = c.clone(args=(1,), kwargs=override_kw) + + with subtests.test(msg='Verify the overridden kwargs were cloned correctly'): + new_kw = c.kwargs.copy() + 
new_kw.update(override_kw) + assert c3.kwargs == new_kw + class test_maybe_signature(CanvasCase): From bdbf6d6ae1aca9addd81800b5dd2e8c3477afb18 Mon Sep 17 00:00:00 2001 From: Dan Cecile Date: Wed, 3 Aug 2022 16:48:31 -0400 Subject: [PATCH 0170/1051] Fix unknown task error typo --- celery/worker/consumer/consumer.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/celery/worker/consumer/consumer.py b/celery/worker/consumer/consumer.py index f1010cf9d35..2aeccff2111 100644 --- a/celery/worker/consumer/consumer.py +++ b/celery/worker/consumer/consumer.py @@ -81,7 +81,7 @@ The full contents of the message body was: %s -Thw full contents of the message headers: +The full contents of the message headers: %s The delivery info for this task is: From 4261546f148deb3f46556f917d44de2ddb18a383 Mon Sep 17 00:00:00 2001 From: Tobias Wochinger Date: Fri, 12 Aug 2022 10:27:11 +0200 Subject: [PATCH 0171/1051] rename redis integration test class so that tests are executed (#7684) * rename test class so it's executed * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * add manager so task is executed * fix test skipping * make tests independent of prior results Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- t/integration/test_tasks.py | 44 ++++++++++++++++++++++++------------- 1 file changed, 29 insertions(+), 15 deletions(-) diff --git a/t/integration/test_tasks.py b/t/integration/test_tasks.py index 198881b891c..bfbaaab2723 100644 --- a/t/integration/test_tasks.py +++ b/t/integration/test_tasks.py @@ -288,29 +288,43 @@ def test_properties(self, celery_session_worker): assert res.get(timeout=TIMEOUT)["app_id"] == "1234" -class tests_task_redis_result_backend: - def setup(self, manager): +class test_task_redis_result_backend: + @pytest.fixture() + def manager(self, manager): if not manager.app.conf.result_backend.startswith('redis'): raise pytest.skip('Requires redis result backend.') - def test_ignoring_result_no_subscriptions(self): - assert get_active_redis_channels() == [] + return manager + + def test_ignoring_result_no_subscriptions(self, manager): + channels_before_test = get_active_redis_channels() + result = add_ignore_result.delay(1, 2) assert result.ignored is True - assert get_active_redis_channels() == [] - def test_asyncresult_forget_cancels_subscription(self): + new_channels = [channel for channel in get_active_redis_channels() if channel not in channels_before_test] + assert new_channels == [] + + def test_asyncresult_forget_cancels_subscription(self, manager): + channels_before_test = get_active_redis_channels() + result = add.delay(1, 2) - assert get_active_redis_channels() == [ - f"celery-task-meta-{result.id}" - ] + assert set(get_active_redis_channels()) == { + f"celery-task-meta-{result.id}".encode(), *channels_before_test + } result.forget() - assert get_active_redis_channels() == [] - def test_asyncresult_get_cancels_subscription(self): + new_channels = [channel for channel in get_active_redis_channels() if channel not in channels_before_test] + assert new_channels == [] + + def test_asyncresult_get_cancels_subscription(self, manager): + channels_before_test = get_active_redis_channels() + result = add.delay(1, 2) - assert get_active_redis_channels() == [ - f"celery-task-meta-{result.id}" - ] + assert set(get_active_redis_channels()) == { + f"celery-task-meta-{result.id}".encode(), *channels_before_test + } assert result.get(timeout=3) == 3 - assert 
get_active_redis_channels() == [] + + new_channels = [channel for channel in get_active_redis_channels() if channel not in channels_before_test] + assert new_channels == [] From 6f95c040ae80df5256073c4827d838e8c1d20ae5 Mon Sep 17 00:00:00 2001 From: Oskar Vuola Date: Sun, 14 Aug 2022 08:59:19 +0000 Subject: [PATCH 0172/1051] Check certificate/private key type when loading them (#7680) * Possible fix for uncaught rsa key error * Raise ValueError when non-RSA certificate key is used * Add certificate public key type check to Certificate.__init__. Public key must be of type RSAPublicKey, otherwise Certificate.verify method will fail * Add unit tests for invalid key/certificate type * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * Update t/unit/security/__init__.py * Fix linting Co-authored-by: Oskar Vuola Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Asif Saif Uddin --- celery/security/certificate.py | 9 ++++++--- celery/security/key.py | 5 ++++- t/unit/security/__init__.py | 30 +++++++++++++++++++++++++++++ t/unit/security/test_certificate.py | 4 +++- t/unit/security/test_key.py | 7 ++++++- 5 files changed, 49 insertions(+), 6 deletions(-) diff --git a/celery/security/certificate.py b/celery/security/certificate.py index 0c31bb79f31..d259734cb13 100644 --- a/celery/security/certificate.py +++ b/celery/security/certificate.py @@ -4,7 +4,7 @@ import os from cryptography.hazmat.backends import default_backend -from cryptography.hazmat.primitives.asymmetric import padding +from cryptography.hazmat.primitives.asymmetric import padding, rsa from cryptography.x509 import load_pem_x509_certificate from kombu.utils.encoding import bytes_to_str, ensure_bytes @@ -25,12 +25,15 @@ def __init__(self, cert): self._cert = load_pem_x509_certificate( ensure_bytes(cert), backend=default_backend()) + if not isinstance(self._cert.public_key(), rsa.RSAPublicKey): + raise ValueError("Non-RSA certificates are not supported.") + def has_expired(self): """Check if the certificate has expired.""" return datetime.datetime.utcnow() >= self._cert.not_valid_after - def get_pubkey(self): - """Get public key from certificate.""" + def get_pubkey(self) -> rsa.RSAPublicKey: + """Get public key from certificate. 
Public key type is checked in __init__.""" return self._cert.public_key() def get_serial_number(self): diff --git a/celery/security/key.py b/celery/security/key.py index 2c4882b6f80..d001059077f 100644 --- a/celery/security/key.py +++ b/celery/security/key.py @@ -1,7 +1,7 @@ """Private keys for the security serializer.""" from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import serialization -from cryptography.hazmat.primitives.asymmetric import padding +from cryptography.hazmat.primitives.asymmetric import padding, rsa from kombu.utils.encoding import ensure_bytes from .utils import reraise_errors @@ -21,6 +21,9 @@ def __init__(self, key, password=None): password=ensure_bytes(password), backend=default_backend()) + if not isinstance(self._key, rsa.RSAPrivateKey): + raise ValueError("Non-RSA keys are not supported.") + def sign(self, data, digest): """Sign string containing data.""" with reraise_errors('Unable to sign data: {0!r}'): diff --git a/t/unit/security/__init__.py b/t/unit/security/__init__.py index feec8ba4d97..1e8befe9afa 100644 --- a/t/unit/security/__init__.py +++ b/t/unit/security/__init__.py @@ -105,3 +105,33 @@ e+zYdEdkFCd8rp568Eiwkq/553uy4rlE927/AEqs/+KGYmAtibk/9vmi+/+iZXyS WWZybzzDZFncq1/N1C3Y/hrCBNDFO4TsnTLAhWtZ4c0vDAiacw== -----END CERTIFICATE-----""" + +CERT_ECDSA = """-----BEGIN CERTIFICATE----- +MIIDTTCCATWgAwIBAgIBCTANBgkqhkiG9w0BAQsFADANMQswCQYDVQQGEwJGSTAe +Fw0yMjA4MDQwOTA5MDlaFw0yNTA0MzAwOTA5MDlaMCMxCzAJBgNVBAYTAkZJMRQw +EgYDVQQDDAtUZXN0IFNlcnZlcjBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABIZV +GFM0uPbXehT55s2yq3Zd7tCvN6GMGpE2+KSZqTtDP5c7x23QvBYF6q/T8MLNWCSB +TxaERpvt8XL+ksOZ8vSjbTBrMB0GA1UdDgQWBBRiY7qDBo7KAYJIn3qTMGAkPimO +6TAyBgNVHSMEKzApoRGkDzANMQswCQYDVQQGEwJGSYIUN/TljutVzZQ8GAMSX8yl +Fy9dO/8wCQYDVR0TBAIwADALBgNVHQ8EBAMCBaAwDQYJKoZIhvcNAQELBQADggIB +AKADv8zZvq8TWtvEZSmf476u+sdxs1hROqqSSJ0M3ePJq2lJ+MGI60eeU/0AyDRt +Q5XAjr2g9wGY3sbA9uYmsIc2kaF+urrUbeoGB1JstALoxviGuM0EzEf+wK5/EbyA +DDMg9j7b51CBMb3FjkiUQgOjM/u5neYpFxF0awXm4khThdOKTFd0FLVX+mcaKPZ4 +dkLcM/0NL25896DBPN982ObHOVqQjtY3sunXVuyeky8rhKmDvpasYu9xRkzSJBp7 +sCPnY6nsCexVICbuI+Q9oNT98YjHipDHQU0U/k/MvK7K/UCY2esKAnxzcOqoMQhi +UjsKddXQ29GUEA9Btn9QB1sp39cR75S8/mFN2f2k/LhNm8j6QeHB4MhZ5L2H68f3 +K2wjzQHMZUrKXf3UM00VbT8E9j0FQ7qjYa7ZnQScvhTqsak2e0um8tqcPyk4WD6l +/gRrLpk8l4x/Qg6F16hdj1p5xOsCUcVDkhIdKf8q3ZXjU2OECYPCFVOwiDQ2ngTf +Se/bcjxgYXBQ99rkEf0vxk47KqC2ZBJy5enUxqUeVbbqho46vJagMzJoAmzp7yFP +c1g8aazOWLD2kUxcqkUn8nv2HqApfycddz2O7OJ5Hl8e4vf+nVliuauGzImo0fiK +VOL9+/r5Kek0fATRWdL4xtbB7zlk+EuoP9T5ZoTYlf14 +-----END CERTIFICATE-----""" + +KEY_ECDSA = """-----BEGIN EC PARAMETERS----- +BggqhkjOPQMBBw== +-----END EC PARAMETERS----- +-----BEGIN EC PRIVATE KEY----- +MHcCAQEEIOj98rAhc4ToQkHby+Iegvhm3UBx+3TwpfNza+2Vn8d7oAoGCCqGSM49 +AwEHoUQDQgAEhlUYUzS49td6FPnmzbKrdl3u0K83oYwakTb4pJmpO0M/lzvHbdC8 +FgXqr9Pwws1YJIFPFoRGm+3xcv6Sw5ny9A== +-----END EC PRIVATE KEY-----""" diff --git a/t/unit/security/test_certificate.py b/t/unit/security/test_certificate.py index d9f525dad25..241527f82df 100644 --- a/t/unit/security/test_certificate.py +++ b/t/unit/security/test_certificate.py @@ -8,7 +8,7 @@ from celery.security.certificate import Certificate, CertStore, FSCertStore from t.unit import conftest -from . import CERT1, CERT2, KEY1 +from . 
import CERT1, CERT2, CERT_ECDSA, KEY1
 from .case import SecurityCase
 
 
@@ -29,6 +29,8 @@ def test_invalid_certificate(self):
             Certificate(CERT1[:20] + CERT1[21:])
         with pytest.raises(SecurityError):
             Certificate(KEY1)
+        with pytest.raises(SecurityError):
+            Certificate(CERT_ECDSA)
 
     @pytest.mark.skip('TODO: cert expired')
     def test_has_expired(self):
diff --git a/t/unit/security/test_key.py b/t/unit/security/test_key.py
index ffa52925bde..eb60ed43999 100644
--- a/t/unit/security/test_key.py
+++ b/t/unit/security/test_key.py
@@ -5,7 +5,7 @@
 from celery.security.key import PrivateKey
 from celery.security.utils import get_digest_algorithm
 
-from . import CERT1, ENCKEY1, ENCKEY2, KEY1, KEY2, KEYPASSWORD
+from . import CERT1, ENCKEY1, ENCKEY2, KEY1, KEY2, KEY_ECDSA, KEYPASSWORD
 from .case import SecurityCase
 
 
@@ -32,9 +32,14 @@ def test_invalid_private_key(self):
             PrivateKey(ENCKEY2, KEYPASSWORD+b"wrong")
         with pytest.raises(SecurityError):
             PrivateKey(CERT1)
+        with pytest.raises(SecurityError):
+            PrivateKey(KEY_ECDSA)
 
     def test_sign(self):
         pkey = PrivateKey(KEY1)
         pkey.sign(ensure_bytes('test'), get_digest_algorithm())
         with pytest.raises(AttributeError):
             pkey.sign(ensure_bytes('test'), get_digest_algorithm('unknown'))
+
+    # pkey = PrivateKey(KEY_ECDSA)
+    # pkey.sign(ensure_bytes('test'), get_digest_algorithm())
From 3db7c9dde9a4d5aa9c0eda8c43a219de1baa9f02 Mon Sep 17 00:00:00 2001
From: Tomer Nosrati
Date: Sun, 14 Aug 2022 20:05:57 +0300
Subject: [PATCH 0173/1051] Added integration test test_chord_header_id_duplicated_on_rabbitmq_msg_duplication() (#7692)

When a task that predates a chord in a chain was duplicated by RabbitMQ (for
whatever reason), the chord header id was not duplicated. This caused the
chord header to have a different id. This test ensures that the chord
header's id preserves itself in the face of such an edge case.
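For reference, the canvas exercised by the test has the following shape (a
sketch; t1-t4 stand for the concrete signatures used in the test itself):

    c = chain(t1, chord([t2, t3], t4))

Both deliveries of t1 must then observe the same chord header id.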
--- .github/workflows/python-package.yml | 2 +- celery/canvas.py | 4 +- pyproject.toml | 2 +- requirements/dev.txt | 3 +- t/integration/tasks.py | 6 ++ t/integration/test_canvas.py | 112 ++++++++++++++++++++++++++- tox.ini | 5 +- 7 files changed, 124 insertions(+), 10 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index cf4afb9b00a..11def86b454 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -83,7 +83,7 @@ jobs: fail-fast: false matrix: python-version: ['3.7', '3.8', '3.9', '3.10'] - toxenv: ['redis', 'rabbitmq'] + toxenv: ['redis', 'rabbitmq', 'rabbitmq_redis'] services: redis: diff --git a/celery/canvas.py b/celery/canvas.py index 6207a73da41..8b851fef3a8 100644 --- a/celery/canvas.py +++ b/celery/canvas.py @@ -605,7 +605,7 @@ def reprcall(self, *args, **kwargs): def __deepcopy__(self, memo): memo[id(self)] = self - return dict(self) + return dict(self) # TODO: Potential bug of being a shallow copy def __invert__(self): return self.apply_async().get() @@ -1687,7 +1687,7 @@ def apply_async(self, args=None, kwargs=None, task_id=None, body = body.clone(**options) app = self._get_app(body) tasks = (self.tasks.clone() if isinstance(self.tasks, group) - else group(self.tasks, app=app)) + else group(self.tasks, app=app, task_id=self.options.get('task_id', uuid()))) if app.conf.task_always_eager: with allow_join_result(): return self.apply(args, kwargs, diff --git a/pyproject.toml b/pyproject.toml index e4ac5e78960..d637cb79f1a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,7 +3,7 @@ addopts = "--strict-markers" testpaths = "t/unit/" python_classes = "test_*" xfail_strict=true -markers = ["sleepdeprived_patched_module", "masked_modules", "patched_environ", "patched_module"] +markers = ["sleepdeprived_patched_module", "masked_modules", "patched_environ", "patched_module", "flaky", "timeout"] [tool.mypy] warn_unused_configs = true diff --git a/requirements/dev.txt b/requirements/dev.txt index 8d28a2924cf..fbc54e32a4e 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -2,4 +2,5 @@ pytz>dev git+https://github.com/celery/py-amqp.git git+https://github.com/celery/kombu.git git+https://github.com/celery/billiard.git -vine>=5.0.0 \ No newline at end of file +vine>=5.0.0 +isort~=5.10.1 diff --git a/t/integration/tasks.py b/t/integration/tasks.py index dcb9d6575f8..64f9512f4b6 100644 --- a/t/integration/tasks.py +++ b/t/integration/tasks.py @@ -241,6 +241,12 @@ def redis_echo(message, redis_key="redis-echo"): redis_connection.rpush(redis_key, message) +@shared_task(bind=True) +def redis_echo_group_id(self, _, redis_key="redis-group-ids"): + redis_connection = get_redis_connection() + redis_connection.rpush(redis_key, self.request.group) + + @shared_task def redis_count(redis_key="redis-count"): """Task that increments a specified or well-known redis key.""" diff --git a/t/integration/test_canvas.py b/t/integration/test_canvas.py index a88d14cba0b..2cf7affa9f9 100644 --- a/t/integration/test_canvas.py +++ b/t/integration/test_canvas.py @@ -12,15 +12,16 @@ from celery.backends.base import BaseKeyValueStoreBackend from celery.exceptions import ImproperlyConfigured, TimeoutError from celery.result import AsyncResult, GroupResult, ResultSet +from celery.signals import before_task_publish from . 
import tasks
 from .conftest import TEST_BACKEND, get_active_redis_channels, get_redis_connection
 from .tasks import (ExpectedException, add, add_chord_to_chord, add_replaced, add_to_all, add_to_all_to_chord,
                     build_chain_inside_task, collect_ids, delayed_sum, delayed_sum_with_soft_guard,
                     errback_new_style, errback_old_style, fail, fail_replaced, identity, ids, print_unicode,
-                    raise_error, redis_count, redis_echo, replace_with_chain, replace_with_chain_which_raises,
-                    replace_with_empty_chain, retry_once, return_exception, return_priority, second_order_replace1,
-                    tsum, write_to_file_and_return_int, xsum)
+                    raise_error, redis_count, redis_echo, redis_echo_group_id, replace_with_chain,
+                    replace_with_chain_which_raises, replace_with_empty_chain, retry_once, return_exception,
+                    return_priority, second_order_replace1, tsum, write_to_file_and_return_int, xsum)
 
 RETRYABLE_EXCEPTIONS = (OSError, ConnectionError, TimeoutError)
 
@@ -62,12 +63,36 @@ def await_redis_echo(expected_msgs, redis_key="redis-echo", timeout=TIMEOUT):
         )
         retrieved_key, msg = maybe_key_msg
         assert retrieved_key.decode("utf-8") == redis_key
-        expected_msgs[msg] -= 1 # silently accepts unexpected messages
+        expected_msgs[msg] -= 1  # silently accepts unexpected messages
 
     # There should be no more elements - block momentarily
     assert redis_connection.blpop(redis_key, min(1, timeout)) is None
 
 
+def await_redis_list_message_length(expected_length, redis_key="redis-group-ids", timeout=TIMEOUT):
+    """
+    Helper to wait for a specified or well-known redis list to reach an expected length.
+    """
+    sleep(1)
+    redis_connection = get_redis_connection()
+
+    check_interval = 0.1
+    check_max = int(timeout / check_interval)
+
+    for i in range(check_max + 1):
+        length = redis_connection.llen(redis_key)
+
+        if length == expected_length:
+            break
+
+        sleep(check_interval)
+    else:
+        raise TimeoutError(f'{redis_key!r} has length of {length}, but expected to be of length {expected_length}')
+
+    sleep(min(1, timeout))
+    assert redis_connection.llen(redis_key) == expected_length
+
+
 def await_redis_count(expected_count, redis_key="redis-count", timeout=TIMEOUT):
     """
     Helper to wait for a specified or well-known redis key to count to a value.
@@ -95,6 +120,13 @@ def await_redis_count(expected_count, redis_key="redis-count", timeout=TIMEOUT):
     assert int(redis_connection.get(redis_key)) == expected_count
 
 
+def compare_group_ids_in_redis(redis_key='redis-group-ids'):
+    redis_connection = get_redis_connection()
+    actual = redis_connection.lrange(redis_key, 0, -1)
+    assert len(actual) >= 2, 'Expected at least 2 group ids in redis'
+    assert actual[0] == actual[1], 'Expected group ids to be equal'
+
+
 class test_link_error:
     @flaky
     def test_link_error_eager(self):
@@ -754,6 +786,78 @@ def test_chain_child_replaced_with_chain_last(self, manager):
         res_obj = orig_sig.delay()
         assert res_obj.get(timeout=TIMEOUT) == 42
 
+    @pytest.mark.parametrize('redis_key', ['redis-group-ids'])
+    def test_chord_header_id_duplicated_on_rabbitmq_msg_duplication(self, manager, subtests, celery_session_app,
+                                                                    redis_key):
+        """
+        When a task that predates a chord in a chain was duplicated by RabbitMQ (for whatever reason),
+        the chord header id was not duplicated. This caused the chord header to have a different id.
+        This test ensures that the chord header's id preserves itself in the face of such an edge case.
+        To validate that the correct behavior is implemented, we collect the original and duplicated chord header ids
+        in redis, to ensure that they are the same.
+ """ + + try: + manager.app.backend.ensure_chords_allowed() + except NotImplementedError as e: + raise pytest.skip(e.args[0]) + + if manager.app.conf.broker_url.startswith('redis'): + raise pytest.xfail('Redis broker does not duplicate the task (t1)') + + # Republish t1 to cause the chain to be executed twice + @before_task_publish.connect + def before_task_publish_handler(sender=None, body=None, exchange=None, routing_key=None, headers=None, + properties=None, + declare=None, retry_policy=None, **kwargs): + """ We want to republish t1 to ensure that the chain is executed twice """ + + metadata = { + 'body': body, + 'exchange': exchange, + 'routing_key': routing_key, + 'properties': properties, + 'headers': headers, + } + + with celery_session_app.producer_pool.acquire(block=True) as producer: + # Publish t1 to the message broker, just before it's going to be published which causes duplication + return producer.publish( + metadata['body'], + exchange=metadata['exchange'], + routing_key=metadata['routing_key'], + retry=None, + retry_policy=retry_policy, + serializer='json', + delivery_mode=None, + headers=headers, + **kwargs + ) + + # Clean redis key + redis_connection = get_redis_connection() + if redis_connection.exists(redis_key): + redis_connection.delete(redis_key) + + # Prepare tasks + t1, t2, t3, t4 = identity.s(42), redis_echo_group_id.s(), identity.s(), identity.s() + c = chain(t1, chord([t2, t3], t4)) + + # Delay chain + r1 = c.delay() + r1.get(timeout=TIMEOUT) + + # Cleanup + before_task_publish.disconnect(before_task_publish_handler) + + with subtests.test(msg='Compare group ids via redis list'): + await_redis_list_message_length(2, redis_key=redis_key, timeout=15) + compare_group_ids_in_redis(redis_key=redis_key) + + # Cleanup + redis_connection = get_redis_connection() + redis_connection.delete(redis_key) + class test_result_set: diff --git a/tox.ini b/tox.ini index bb456a64e8f..3e4be9020c7 100644 --- a/tox.ini +++ b/tox.ini @@ -3,7 +3,7 @@ requires = tox-gh-actions envlist = {3.7,3.8,3.9,3.10,pypy3}-unit - {3.7,3.8,3.9,3.10,pypy3}-integration-{rabbitmq,redis,dynamodb,azureblockblob,cache,cassandra,elasticsearch} + {3.7,3.8,3.9,3.10,pypy3}-integration-{rabbitmq_redis,rabbitmq,redis,dynamodb,azureblockblob,cache,cassandra,elasticsearch} flake8 apicheck @@ -64,6 +64,9 @@ setenv = redis: TEST_BROKER=redis:// redis: TEST_BACKEND=redis:// + rabbitmq_redis: TEST_BROKER=pyamqp:// + rabbitmq_redis: TEST_BACKEND=redis:// + dynamodb: TEST_BROKER=redis:// dynamodb: TEST_BACKEND=dynamodb://@localhost:8000 dynamodb: AWS_ACCESS_KEY_ID=test_aws_key_id From d4146ebd5afa7c7078da68be48df1c089d202e62 Mon Sep 17 00:00:00 2001 From: Omer Katz Date: Sun, 21 Aug 2022 12:03:24 +0300 Subject: [PATCH 0174/1051] Use tuple instead of list for DEFAULT_ACCEPT_CONTENT. 
--- celery/app/defaults.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/celery/app/defaults.py b/celery/app/defaults.py index 29ce4ee77f6..b5a869e1c77 100644 --- a/celery/app/defaults.py +++ b/celery/app/defaults.py @@ -10,7 +10,7 @@ DEFAULT_POOL = 'prefork' -DEFAULT_ACCEPT_CONTENT = ['json'] +DEFAULT_ACCEPT_CONTENT = ('json',) DEFAULT_PROCESS_LOG_FMT = """ [%(asctime)s: %(levelname)s/%(processName)s] %(message)s """.strip() From 7b585138af8318d62b8fe7086df7e85d110ac786 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 22 Aug 2022 13:24:17 +0300 Subject: [PATCH 0175/1051] New feature flag: allow_error_cb_on_chord_header - allowing setting an error callback on chord header (#7712) * Added a new flag for linking an error callback to a chord header. The purpose of the new flag is to allow a chord header failure to prevent the chord body from executing while executing the error callback on both the header and the body (in case of failure). This differs from the default behavior where the chord header failure DOES NOT prevent the body from executing nor executing an error callback on the header. * Added new flag under task namespace: allow_error_cb_on_chord_header (turned off by default) * Added integration test to confirm flag works: test_flag_allow_error_cb_on_chord_header() * Added unit test to confirm flag works: test_flag_allow_error_cb_on_chord_header() * Added documentation to task_allow_error_cb_on_chord_header flag * Documentation fixes Co-authored-by: Omer Katz * Add deprecation pending message. * Created enabled/disable integration tests for task_allow_error_cb_on_chord_header flag * Corrected documentation * Linter issue fix * English typo fix Co-authored-by: Omer Katz Co-authored-by: Omer Katz --- celery/app/defaults.py | 1 + celery/canvas.py | 21 ++++- docs/userguide/calling.rst | 3 + docs/userguide/canvas.rst | 3 + docs/userguide/configuration.rst | 41 ++++++++++ t/integration/test_canvas.py | 134 +++++++++++++++++++++++++++++++ t/unit/tasks/test_canvas.py | 32 ++++++++ 7 files changed, 234 insertions(+), 1 deletion(-) diff --git a/celery/app/defaults.py b/celery/app/defaults.py index b5a869e1c77..ce8d0ae1a90 100644 --- a/celery/app/defaults.py +++ b/celery/app/defaults.py @@ -291,6 +291,7 @@ def __repr__(self): ), store_errors_even_if_ignored=Option(False, type='bool'), track_started=Option(False, type='bool'), + allow_error_cb_on_chord_header=Option(False, type='bool'), ), worker=Namespace( __old__=OLD_NS_WORKER, diff --git a/celery/canvas.py b/celery/canvas.py index 8b851fef3a8..086f191aab5 100644 --- a/celery/canvas.py +++ b/celery/canvas.py @@ -7,6 +7,7 @@ import itertools import operator +import warnings from abc import ABCMeta, abstractmethod from collections import deque from collections.abc import MutableSequence @@ -22,6 +23,7 @@ from vine import barrier from celery._state import current_app +from celery.exceptions import CPendingDeprecationWarning from celery.result import GroupResult, allow_join_result from celery.utils import abstract from celery.utils.collections import ChainMap @@ -1612,7 +1614,7 @@ def __call__(self, body=None, **options): def __or__(self, other): if (not isinstance(other, (group, _chain)) and - isinstance(other, Signature)): + isinstance(other, Signature)): # chord | task -> attach to body sig = self.clone() sig.body = sig.body | other @@ -1808,6 +1810,23 @@ def link(self, callback): return callback def link_error(self, errback): + if self.app.conf.task_allow_error_cb_on_chord_header: + # self.tasks can be a list of the 
chord header workflow.
+            if isinstance(self.tasks, list):
+                for task in self.tasks:
+                    task.link_error(errback)
+            else:
+                self.tasks.link_error(errback)
+        else:
+            warnings.warn(
+                "task_allow_error_cb_on_chord_header=False is pending deprecation in "
+                "a future release of Celery.\n"
+                "Please test the new behavior by setting task_allow_error_cb_on_chord_header to True "
+                "and report any concerns you might have in our issue tracker before we make a final decision "
+                "regarding how errbacks should behave when used with chords.",
+                CPendingDeprecationWarning
+            )
+
         self.body.link_error(errback)
         return errback
diff --git a/docs/userguide/calling.rst b/docs/userguide/calling.rst
index 06f0879c5cb..30894849098 100644
--- a/docs/userguide/calling.rst
+++ b/docs/userguide/calling.rst
@@ -167,6 +167,9 @@ The callbacks/errbacks will then be called in order, and all
 callbacks will be called with the return value of the parent task
 as a partial argument.
 
+In the case of a chord, we can handle errors using multiple handling strategies.
+See :ref:`chord error handling ` for more information.
+
 .. _calling-on-message:
 
 On message
diff --git a/docs/userguide/canvas.rst b/docs/userguide/canvas.rst
index 5904ef98807..9e72f55f2f7 100644
--- a/docs/userguide/canvas.rst
+++ b/docs/userguide/canvas.rst
@@ -943,6 +943,9 @@ Doing so will link the provided signature to the chord's body which can be
 expected to gracefully invoke callbacks just once upon completion of the
 body, or errbacks just once if any task in the chord header or body fails.
 
+This behavior can be manipulated to allow error handling of the chord header using the :ref:`task_allow_error_cb_on_chord_header ` flag.
+Enabling this flag will cause the chord header to invoke the errback for the body (default behavior) *and* any task in the chord's header that fails.
+
 .. _chord-important-notes:
 
 Important Notes
diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst
index ebe9c968664..ea21fee49b4 100644
--- a/docs/userguide/configuration.rst
+++ b/docs/userguide/configuration.rst
@@ -151,6 +151,7 @@ have been moved into a new ``task_`` prefix.
 ``CELERY_TASK_TRACK_STARTED``             :setting:`task_track_started`
 ``CELERY_TASK_REJECT_ON_WORKER_LOST``     :setting:`task_reject_on_worker_lost`
 ``CELERYD_TIME_LIMIT``                    :setting:`task_time_limit`
+``CELERY_ALLOW_ERROR_CB_ON_CHORD_HEADER`` :setting:`task_allow_error_cb_on_chord_header`
 ``CELERYD_AGENT``                         :setting:`worker_agent`
 ``CELERYD_AUTOSCALER``                    :setting:`worker_autoscaler`
 ``CELERYD_CONCURRENCY``                   :setting:`worker_concurrency`
@@ -511,6 +512,46 @@ Default: No time limit.
 Task hard time limit in seconds. The worker processing the task will
 be killed and replaced with a new one when this is exceeded.
 
+.. setting:: task_allow_error_cb_on_chord_header
+
+``task_allow_error_cb_on_chord_header``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. versionadded:: 5.3
+
+Default: Disabled.
+
+Enabling this flag will allow linking an error callback to a chord header,
+which :code:`link_error()` will not do by default, and will prevent the
+chord's body from executing if any of the tasks in the header fails.
+
+Consider the following canvas with the flag disabled (default behavior):
+
+.. 
code-block:: python + + header = group([t1, t2]) + body = t3 + c = chord(header, body) + c.link_error(error_callback_sig) + +If *any* of the header tasks failed (:code:`t1` or :code:`t2`), by default, the chord body (:code:`t3`) would **not execute**, and :code:`error_callback_sig` will be called **once** (for the body). + +Enabling this flag will change the above behavior by: + +1. :code:`error_callback_sig` will be linked to :code:`t1` and :code:`t2` (as well as :code:`t3`). +2. If *any* of the header tasks failed, :code:`error_callback_sig` will be called **for each** failed header task **and** the :code:`body` (even if the body didn't run). + +Consider now the following canvas with the flag enabled: + +.. code-block:: python + + header = group([failingT1, failingT2]) + body = t3 + c = chord(header, body) + c.link_error(error_callback_sig) + +If *all* of the header tasks failed (:code:`failingT1` and :code:`failingT2`), then the chord body (:code:`t3`) would **not execute**, and :code:`error_callback_sig` will be called **3 times** (two times for the header and one time for the body). + .. setting:: task_soft_time_limit ``task_soft_time_limit`` diff --git a/t/integration/test_canvas.py b/t/integration/test_canvas.py index 2cf7affa9f9..184036714ef 100644 --- a/t/integration/test_canvas.py +++ b/t/integration/test_canvas.py @@ -2638,6 +2638,140 @@ def test_chord_body_chain_child_replaced_with_chain_last(self, manager): res_obj = orig_sig.delay() assert res_obj.get(timeout=TIMEOUT) == [42] + def test_enabling_flag_allow_error_cb_on_chord_header(self, manager, subtests): + """ + Test that the flag allow_error_callback_on_chord_header works as + expected. To confirm this, we create a chord with a failing header + task, and check that the body does not execute when the header task fails. + This allows preventing the body from executing when the chord header fails + when the flag is turned on. In addition, we make sure the body error callback + is also executed when the header fails and the flag is turned on. 
+ """ + try: + manager.app.backend.ensure_chords_allowed() + except NotImplementedError as e: + raise pytest.skip(e.args[0]) + + if not manager.app.conf.result_backend.startswith('redis'): + raise pytest.skip('Requires redis result backend.') + redis_connection = get_redis_connection() + + manager.app.conf.task_allow_error_cb_on_chord_header = True + + header_errback_msg = 'header errback called' + header_errback_key = 'echo_header_errback' + header_errback_sig = redis_echo.si(header_errback_msg, redis_key=header_errback_key) + + body_errback_msg = 'body errback called' + body_errback_key = 'echo_body_errback' + body_errback_sig = redis_echo.si(body_errback_msg, redis_key=body_errback_key) + + body_msg = 'chord body called' + body_key = 'echo_body' + body_sig = redis_echo.si(body_msg, redis_key=body_key) + + headers = ( + (fail.si(),), + (fail.si(), fail.si(), fail.si()), + (fail.si(), identity.si(42)), + (fail.si(), identity.si(42), identity.si(42)), + (fail.si(), identity.si(42), fail.si()), + (fail.si(), identity.si(42), fail.si(), identity.si(42)), + (fail.si(), identity.si(42), fail.si(), identity.si(42), fail.si()), + ) + + # for some reason using parametrize breaks the test so we do it manually unfortunately + for header in headers: + chord_sig = chord(header, body_sig) + # link error to chord header ONLY + [header_task.link_error(header_errback_sig) for header_task in chord_sig.tasks] + # link error to chord body ONLY + chord_sig.body.link_error(body_errback_sig) + redis_connection.delete(header_errback_key, body_errback_key, body_key) + + with subtests.test(msg='Error propagates from failure in header'): + res = chord_sig.delay() + with pytest.raises(ExpectedException): + res.get(timeout=TIMEOUT) + + with subtests.test(msg='Confirm the body was not executed'): + with pytest.raises(TimeoutError): + # confirm the chord body was not called + await_redis_echo((body_msg,), redis_key=body_key, timeout=10) + # Double check + assert not redis_connection.exists(body_key), 'Chord body was called when it should have not' + + with subtests.test(msg='Confirm the errback was called for each failed header task + body'): + # confirm the errback was called for each task in the chord header + failed_header_tasks_count = len(list(filter(lambda f_sig: f_sig == fail.si(), header))) + expected_header_errbacks = tuple(header_errback_msg for _ in range(failed_header_tasks_count)) + await_redis_echo(expected_header_errbacks, redis_key=header_errback_key) + + # confirm the errback was called for the chord body + await_redis_echo((body_errback_msg,), redis_key=body_errback_key) + + redis_connection.delete(header_errback_key, body_errback_key) + + def test_disabling_flag_allow_error_cb_on_chord_header(self, manager, subtests): + """ + Confirm that when allow_error_callback_on_chord_header is disabled, the default + behavior is kept. 
+ """ + try: + manager.app.backend.ensure_chords_allowed() + except NotImplementedError as e: + raise pytest.skip(e.args[0]) + + if not manager.app.conf.result_backend.startswith('redis'): + raise pytest.skip('Requires redis result backend.') + redis_connection = get_redis_connection() + + manager.app.conf.task_allow_error_cb_on_chord_header = False + + errback_msg = 'errback called' + errback_key = 'echo_errback' + errback_sig = redis_echo.si(errback_msg, redis_key=errback_key) + + body_msg = 'chord body called' + body_key = 'echo_body' + body_sig = redis_echo.si(body_msg, redis_key=body_key) + + headers = ( + (fail.si(),), + (fail.si(), fail.si(), fail.si()), + (fail.si(), identity.si(42)), + (fail.si(), identity.si(42), identity.si(42)), + (fail.si(), identity.si(42), fail.si()), + (fail.si(), identity.si(42), fail.si(), identity.si(42)), + (fail.si(), identity.si(42), fail.si(), identity.si(42), fail.si()), + ) + + # for some reason using parametrize breaks the test so we do it manually unfortunately + for header in headers: + chord_sig = chord(header, body_sig) + chord_sig.link_error(errback_sig) + redis_connection.delete(errback_key, body_key) + + with subtests.test(msg='Error propagates from failure in header'): + res = chord_sig.delay() + with pytest.raises(ExpectedException): + res.get(timeout=TIMEOUT) + + with subtests.test(msg='Confirm the body was not executed'): + with pytest.raises(TimeoutError): + # confirm the chord body was not called + await_redis_echo((body_msg,), redis_key=body_key, timeout=10) + # Double check + assert not redis_connection.exists(body_key), 'Chord body was called when it should have not' + + with subtests.test(msg='Confirm only one errback was called'): + await_redis_echo((errback_msg,), redis_key=errback_key, timeout=10) + with pytest.raises(TimeoutError): + await_redis_echo((errback_msg,), redis_key=errback_key, timeout=10) + + # Cleanup + redis_connection.delete(errback_key) + class test_signature_serialization: """ diff --git a/t/unit/tasks/test_canvas.py b/t/unit/tasks/test_canvas.py index 677cb190b3d..092f24be13a 100644 --- a/t/unit/tasks/test_canvas.py +++ b/t/unit/tasks/test_canvas.py @@ -2042,6 +2042,38 @@ def test_chord_clone_kwargs(self, subtests): new_kw.update(override_kw) assert c3.kwargs == new_kw + def test_flag_allow_error_cb_on_chord_header(self, subtests): + header_mock = [Mock(name='t1'), Mock(name='t2')] + header = group(header_mock) + body = Mock(name='tbody') + errback_sig = Mock(name='errback_sig') + chord_sig = chord(header, body, app=self.app) + + with subtests.test(msg='Verify the errback is not linked'): + # header + for child_sig in header_mock: + child_sig.link_error.assert_not_called() + # body + body.link_error.assert_not_called() + + with subtests.test(msg='Verify flag turned off links only the body'): + self.app.conf.task_allow_error_cb_on_chord_header = False + chord_sig.link_error(errback_sig) + # header + for child_sig in header_mock: + child_sig.link_error.assert_not_called() + # body + body.link_error.assert_called_once_with(errback_sig) + + with subtests.test(msg='Verify flag turned on links the header'): + self.app.conf.task_allow_error_cb_on_chord_header = True + chord_sig.link_error(errback_sig) + # header + for child_sig in header_mock: + child_sig.link_error.assert_called_once_with(errback_sig) + # body + body.link_error.assert_has_calls([call(errback_sig), call(errback_sig)]) + class test_maybe_signature(CanvasCase): From adf2a00ed76e5e12dea2a4c704e60f869d8aa980 Mon Sep 17 00:00:00 2001 From: Andre Pereira 
Date: Thu, 18 Aug 2022 10:06:40 -0300 Subject: [PATCH 0176/1051] Update README.rst sorting Python/Celery versions --- README.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.rst b/README.rst index 33ddcf75c7c..f3767f79bfd 100644 --- a/README.rst +++ b/README.rst @@ -68,11 +68,11 @@ This is the version of celery which will support Python 3.7 or newer. If you're running an older version of Python, you need to be running an older version of Celery: +- Python 3.6: Celery 5.1 or earlier. +- Python 2.7: Celery 4.x series. - Python 2.6: Celery series 3.1 or earlier. - Python 2.5: Celery series 3.0 or earlier. - Python 2.4: Celery series 2.2 or earlier. -- Python 2.7: Celery 4.x series. -- Python 3.6: Celery 5.1 or earlier. Celery is a project with minimal funding, so we don't support Microsoft Windows. From e6e0cd72ac49b7968f4557d5fc6a2665093e4cd6 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 24 Aug 2022 14:13:29 +0300 Subject: [PATCH 0177/1051] Fixed a bug where stamping a chord body would not use the correct stamping method (#7722) --- celery/canvas.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/celery/canvas.py b/celery/canvas.py index 086f191aab5..1f450971014 100644 --- a/celery/canvas.py +++ b/celery/canvas.py @@ -176,7 +176,7 @@ def on_chord_body(self, chord, **header) -> dict: Returns: Dict: headers to update. """ - return self.on_signature(chord.body, **header) + return {} class GroupStampingVisitor(StampingVisitor): From f4628639ca39be4d0aea39ec1c2f066117529316 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 24 Aug 2022 17:54:44 +0300 Subject: [PATCH 0178/1051] Fixed doc duplication typo for Signature.stamp() (#7725) --- celery/canvas.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/celery/canvas.py b/celery/canvas.py index 1f450971014..e890198e5bf 100644 --- a/celery/canvas.py +++ b/celery/canvas.py @@ -496,7 +496,9 @@ def set_immutable(self, immutable): self.immutable = immutable def stamp(self, visitor=None, **headers): - """Apply this task asynchronously. + """Stamp this signature with additional custom headers. + Using a visitor will pass on responsibility for the stamping + to the visitor. Arguments: visitor (StampingVisitor): Visitor API object. 
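For context, a custom stamping visitor is used roughly as follows (a sketch
only; ``add`` is a stand-in task and the header values are arbitrary):

    from celery.canvas import StampingVisitor

    class MonitoringIdVisitor(StampingVisitor):
        def on_signature(self, sig, **headers):
            # every custom header must also be listed under 'stamped_headers'
            return {'monitoring_id': '4242', 'stamped_headers': ['monitoring_id']}

    add.s(2, 2).stamp(visitor=MonitoringIdVisitor())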
From 018f996fe2f80ed2e69ffd93bd9b35dfa41d1141 Mon Sep 17 00:00:00 2001 From: woutdenolf Date: Thu, 25 Aug 2022 10:48:33 +0200 Subject: [PATCH 0179/1051] initialize all variables which are used in the `finally` block --- celery/contrib/testing/worker.py | 1 + 1 file changed, 1 insertion(+) diff --git a/celery/contrib/testing/worker.py b/celery/contrib/testing/worker.py index c72dc0e4006..bf24b30b1c8 100644 --- a/celery/contrib/testing/worker.py +++ b/celery/contrib/testing/worker.py @@ -72,6 +72,7 @@ def start_worker( """ test_worker_starting.send(sender=app) + worker = None try: with _start_worker_thread(app, concurrency=concurrency, From fbae71ca2bc2eb68988131f5719a1dc5807d58fd Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 30 Aug 2022 06:51:54 +0300 Subject: [PATCH 0180/1051] Fixed bug in chord stamping with another chord as a body + unit test (#7730) * Fixed bug in chord stamping with another chord as a body + unit test * Added support to Python 3.7 for xprod task in the canvas tests * Update t/unit/tasks/test_canvas.py Added #TODO for removing the Python 3.7 newly added support patch when the support would be dropped entirely for 3.7 Co-authored-by: Omer Katz Co-authored-by: Omer Katz --- celery/canvas.py | 19 +++++++---------- t/unit/tasks/test_canvas.py | 42 ++++++++++++++++++++++++++++++++++++- 2 files changed, 48 insertions(+), 13 deletions(-) diff --git a/celery/canvas.py b/celery/canvas.py index e890198e5bf..707d93a4572 100644 --- a/celery/canvas.py +++ b/celery/canvas.py @@ -91,7 +91,6 @@ class StampingVisitor(metaclass=ABCMeta): a canvas primitive override method that represents it. """ - @abstractmethod def on_group_start(self, group, **headers) -> dict: """Method that is called on group stamping start. @@ -101,7 +100,7 @@ def on_group_start(self, group, **headers) -> dict: Returns: Dict: headers to update. """ - pass + return {} def on_group_end(self, group, **headers) -> None: """Method that is called on group stamping end. @@ -112,7 +111,6 @@ def on_group_end(self, group, **headers) -> None: """ pass - @abstractmethod def on_chain_start(self, chain, **headers) -> dict: """Method that is called on chain stamping start. @@ -122,7 +120,7 @@ def on_chain_start(self, chain, **headers) -> dict: Returns: Dict: headers to update. """ - pass + return {} def on_chain_end(self, chain, **headers) -> None: """Method that is called on chain stamping end. 
@@ -196,14 +194,11 @@ def on_group_start(self, group, **headers) -> dict: if group.id not in self.groups: self.groups.append(group.id) - return {'groups': list(self.groups), "stamped_headers": list(self.stamped_headers)} + return super().on_group_start(group, **headers) def on_group_end(self, group, **headers) -> None: self.groups.pop() - def on_chain_start(self, chain, **headers) -> dict: - return {'groups': list(self.groups), "stamped_headers": list(self.stamped_headers)} - def on_signature(self, sig, **headers) -> dict: return {'groups': list(self.groups), "stamped_headers": list(self.stamped_headers)} @@ -1658,10 +1653,6 @@ def freeze(self, _id=None, group_id=None, chord=None, return body_result def stamp(self, visitor=None, **headers): - if visitor is not None and self.body is not None: - headers.update(visitor.on_chord_body(self, **headers)) - self.body.stamp(visitor=visitor, **headers) - if visitor is not None: headers.update(visitor.on_chord_header_start(self, **headers)) super().stamp(visitor=visitor, **headers) @@ -1679,6 +1670,10 @@ def stamp(self, visitor=None, **headers): if visitor is not None: visitor.on_chord_header_end(self, **headers) + if visitor is not None and self.body is not None: + headers.update(visitor.on_chord_body(self, **headers)) + self.body.stamp(visitor=visitor, **headers) + def apply_async(self, args=None, kwargs=None, task_id=None, producer=None, publisher=None, connection=None, router=None, result_cls=None, **options): diff --git a/t/unit/tasks/test_canvas.py b/t/unit/tasks/test_canvas.py index 092f24be13a..bc25b7408ee 100644 --- a/t/unit/tasks/test_canvas.py +++ b/t/unit/tasks/test_canvas.py @@ -88,7 +88,15 @@ def replace_with_chain(self, x, y): @self.app.task(shared=False) def xprod(numbers): - return math.prod(numbers) + try: + return math.prod(numbers) + except AttributeError: + # TODO: Drop this backport once + # we drop support for Python 3.7 + import operator + from functools import reduce + + return reduce(operator.mul, numbers) self.xprod = xprod @@ -1591,6 +1599,38 @@ def test_chord_stamping_body_group(self, subtests): with subtests.test("prod_task_res is stamped", groups=[body.id]): assert prod_task_res._get_task_meta()['groups'] == [body.id] + def test_chord_stamping_body_chord(self, subtests): + """ + In the case of chord within a chord that is from another canvas + element, ensure that chord stamps are added correctly when chord are + run in parallel. 
+ """ + self.app.conf.task_always_eager = True + self.app.conf.task_store_eager_result = True + self.app.conf.result_extended = True + + parent_header_tasks = [self.add.s(i, i) for i in range(10)] + + sum_task = self.xsum.s() + sum_task_res = sum_task.freeze() + sum_task2 = self.xsum.s() + sum_task_res2 = sum_task2.freeze() + prod_task = self.xprod.s() + prod_task_res = sum_task.freeze() + + body = chord(group(sum_task, prod_task), sum_task2, app=self.app) + + g = chord(parent_header_tasks, body, app=self.app) + g.freeze() + g.apply() + + with subtests.test("sum_task_res is stamped", groups=[body.id]): + assert sum_task_res._get_task_meta()['groups'] == [body.id] + with subtests.test("prod_task_res is stamped", groups=[body.id]): + assert prod_task_res._get_task_meta()['groups'] == [body.id] + with subtests.test("sum_task_res2 is NOT stamped", groups=[]): + assert len(sum_task_res2._get_task_meta()['groups']) == 0 + def test__get_app_does_not_exhaust_generator(self): def build_generator(): yield self.add.s(1, 1) From 876cc92590c3e4e77a363e0344dae7dc5f4aea29 Mon Sep 17 00:00:00 2001 From: maxfirman Date: Wed, 31 Aug 2022 18:43:32 +0100 Subject: [PATCH 0181/1051] Use "describe_table" not "create_table" to check for existence of DynamoDB table (#7734) * Use "describe_table" not "create_table" to check for existence of DynamoDB table * fix tests Co-authored-by: Firman, Max --- celery/backends/dynamodb.py | 33 +++++++++++++---------------- t/unit/backends/test_dynamodb.py | 36 ++++++++++++++------------------ 2 files changed, 31 insertions(+), 38 deletions(-) diff --git a/celery/backends/dynamodb.py b/celery/backends/dynamodb.py index 7c2f1ca5b39..fbc8bcf160e 100644 --- a/celery/backends/dynamodb.py +++ b/celery/backends/dynamodb.py @@ -201,28 +201,25 @@ def _get_or_create_table(self): """Create table if not exists, otherwise return the description.""" table_schema = self._get_table_schema() try: - table_description = self._client.create_table(**table_schema) - logger.info( - 'DynamoDB Table {} did not exist, creating.'.format( - self.table_name - ) - ) - # In case we created the table, wait until it becomes available. - self._wait_for_table_status('ACTIVE') - logger.info( - 'DynamoDB Table {} is now available.'.format( - self.table_name - ) - ) - return table_description + return self._client.describe_table(TableName=self.table_name) except ClientError as e: error_code = e.response['Error'].get('Code', 'Unknown') - # If table exists, do not fail, just return the description. - if error_code == 'ResourceInUseException': - return self._client.describe_table( - TableName=self.table_name + if error_code == 'ResourceNotFoundException': + table_description = self._client.create_table(**table_schema) + logger.info( + 'DynamoDB Table {} did not exist, creating.'.format( + self.table_name + ) + ) + # In case we created the table, wait until it becomes available. 
+ self._wait_for_table_status('ACTIVE') + logger.info( + 'DynamoDB Table {} is now available.'.format( + self.table_name + ) ) + return table_description else: raise e diff --git a/t/unit/backends/test_dynamodb.py b/t/unit/backends/test_dynamodb.py index 6fd2625c0cb..a27af96d6ff 100644 --- a/t/unit/backends/test_dynamodb.py +++ b/t/unit/backends/test_dynamodb.py @@ -121,39 +121,34 @@ def test_get_client_time_to_live_called( mock_set_table_ttl.assert_called_once() def test_get_or_create_table_not_exists(self): + from botocore.exceptions import ClientError + self.backend._client = MagicMock() mock_create_table = self.backend._client.create_table = MagicMock() + client_error = ClientError( + { + 'Error': { + 'Code': 'ResourceNotFoundException' + } + }, + 'DescribeTable' + ) mock_describe_table = self.backend._client.describe_table = \ MagicMock() - - mock_describe_table.return_value = { - 'Table': { - 'TableStatus': 'ACTIVE' - } - } + mock_describe_table.side_effect = client_error + self.backend._wait_for_table_status = MagicMock() self.backend._get_or_create_table() + mock_describe_table.assert_called_once_with( + TableName=self.backend.table_name + ) mock_create_table.assert_called_once_with( **self.backend._get_table_schema() ) def test_get_or_create_table_already_exists(self): - from botocore.exceptions import ClientError - self.backend._client = MagicMock() mock_create_table = self.backend._client.create_table = MagicMock() - client_error = ClientError( - { - 'Error': { - 'Code': 'ResourceInUseException', - 'Message': 'Table already exists: {}'.format( - self.backend.table_name - ) - } - }, - 'CreateTable' - ) - mock_create_table.side_effect = client_error mock_describe_table = self.backend._client.describe_table = \ MagicMock() @@ -167,6 +162,7 @@ def test_get_or_create_table_already_exists(self): mock_describe_table.assert_called_once_with( TableName=self.backend.table_name ) + mock_create_table.assert_not_called() def test_wait_for_table_status(self): self.backend._client = MagicMock() From afe0c2354bf61745d70df7b7005667e4f9ae64f6 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 6 Sep 2022 12:57:52 +0300 Subject: [PATCH 0182/1051] Added test for task_allow_error_cb_on_chord_header flag with an upgraded chord input (#7744) --- docs/userguide/configuration.rst | 11 ++++++ t/integration/test_canvas.py | 64 ++++++++++++++++++++++++++++++++ 2 files changed, 75 insertions(+) diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst index ea21fee49b4..b798aaa4ce6 100644 --- a/docs/userguide/configuration.rst +++ b/docs/userguide/configuration.rst @@ -552,6 +552,17 @@ Consider now the following canvas with the flag enabled: If *all* of the header tasks failed (:code:`failingT1` and :code:`failingT2`), then the chord body (:code:`t3`) would **not execute**, and :code:`error_callback_sig` will be called **3 times** (two times for the header and one time for the body). +Lastly, consider the following canvas with the flag enabled: + +.. code-block:: python + + header = group([failingT1, failingT2]) + body = t3 + upgraded_chord = chain(header, body) + upgraded_chord.link_error(error_callback_sig) + +This canvas will behave exactly the same as the previous one, since the :code:`chain` will be upgraded to a :code:`chord` internally. + .. 
setting:: task_soft_time_limit

 ``task_soft_time_limit``

diff --git a/t/integration/test_canvas.py b/t/integration/test_canvas.py
index 184036714ef..33ed392944b 100644
--- a/t/integration/test_canvas.py
+++ b/t/integration/test_canvas.py
@@ -2772,6 +2772,70 @@ def test_disabling_flag_allow_error_cb_on_chord_header(self, manager, subtests):
         # Cleanup
         redis_connection.delete(errback_key)

+    def test_flag_allow_error_cb_on_chord_header_on_upgraded_chord(self, manager, subtests):
+        """
+        Confirm that the allow_error_callback_on_chord_header flag supports upgraded chords
+        """
+        try:
+            manager.app.backend.ensure_chords_allowed()
+        except NotImplementedError as e:
+            raise pytest.skip(e.args[0])
+
+        if not manager.app.conf.result_backend.startswith('redis'):
+            raise pytest.skip('Requires redis result backend.')
+        redis_connection = get_redis_connection()
+
+        manager.app.conf.task_allow_error_cb_on_chord_header = True
+
+        errback_msg = 'errback called'
+        errback_key = 'echo_errback'
+        errback_sig = redis_echo.si(errback_msg, redis_key=errback_key)
+
+        body_msg = 'chord body called'
+        body_key = 'echo_body'
+        body_sig = redis_echo.si(body_msg, redis_key=body_key)
+
+        headers = (
+            # (fail.si(),), <-- this is not supported because it's not a valid chord header (only one task)
+            (fail.si(), fail.si(), fail.si()),
+            (fail.si(), identity.si(42)),
+            (fail.si(), identity.si(42), identity.si(42)),
+            (fail.si(), identity.si(42), fail.si()),
+            (fail.si(), identity.si(42), fail.si(), identity.si(42)),
+            (fail.si(), identity.si(42), fail.si(), identity.si(42), fail.si()),
+        )
+
+        # using parametrize breaks this test for reasons that are unclear,
+        # so we iterate over the headers manually instead
+        for header in headers:
+            implicit_chord_sig = chain(group(list(header)), body_sig)
+            implicit_chord_sig.link_error(errback_sig)
+            redis_connection.delete(errback_key, body_key)
+
+            with subtests.test(msg='Error propagates from failure in header'):
+                res = implicit_chord_sig.delay()
+                with pytest.raises(ExpectedException):
+                    res.get(timeout=TIMEOUT)
+
+            with subtests.test(msg='Confirm the body was not executed'):
+                with pytest.raises(TimeoutError):
+                    # confirm the chord body was not called
+                    await_redis_echo((body_msg,), redis_key=body_key, timeout=10)
+                # Double check
+                assert not redis_connection.exists(body_key), 'Chord body was called when it should not have been'
+
+            with subtests.test(msg='Confirm the errback was called for each failed header task + body'):
+                # confirm the errback was called for each task in the chord header
+                failed_header_tasks_count = len(list(filter(lambda f_sig: f_sig.name == fail.si().name, header)))
+                expected_errbacks_count = failed_header_tasks_count + 1  # +1 for the body
+                expected_errbacks = tuple(errback_msg for _ in range(expected_errbacks_count))
+                await_redis_echo(expected_errbacks, redis_key=errback_key)
+
+                # confirm there are no leftovers
+                assert not redis_connection.exists(errback_key)
+
+            # Cleanup
+            redis_connection.delete(errback_key)
+

 class test_signature_serialization:
     """

From aecbeda4e8de7ac6817106b91a1e88b8515db66e Mon Sep 17 00:00:00 2001
From: Tomer Nosrati
Date: Wed, 7 Sep 2022 13:48:57 +0300
Subject: [PATCH 0183/1051] Improved custom stamping visitor documentation
 (#7745)

---
 docs/userguide/canvas.rst | 32 +++++++++++++++++++++++++++++++-
 1 file changed, 31 insertions(+), 1 deletion(-)

diff --git a/docs/userguide/canvas.rst b/docs/userguide/canvas.rst
index 9e72f55f2f7..b8db4c315b6 100644
--- a/docs/userguide/canvas.rst
+++ b/docs/userguide/canvas.rst
@@ -1201,7 +1201,7 @@ pattern.
The class that implements this custom logic must inherit ``StampingVisitor``
 and implement appropriate methods. For example, the following
 ``InGroupVisitor`` will label
-tasks that are in side of some group by lable ``in_group``.
+tasks that are inside of some group by label ``in_group``.

 .. code-block:: python

@@ -1221,3 +1221,33 @@ tasks that are in side of some group by lable ``in_group``.

         def on_signature(self, sig, **headers) -> dict:
             return {"in_group": [self.in_group], "stamped_headers": ["in_group"]}
+
+The following example shows another custom stamping visitor, which labels all
+tasks with a custom ``monitoring_id`` that can be used to track task execution
+in an external monitoring system. The ``monitoring_id`` can be a randomly
+generated UUID, or the unique identifier (for example, a span id) used by the
+external monitoring system.
+
+.. code-block:: python
+
+    class MonitoringIdStampingVisitor(StampingVisitor):
+        def on_signature(self, sig, **headers) -> dict:
+            return {'monitoring_id': uuid4(), 'stamped_headers': ['monitoring_id']}
+
+Next, let's see how to use the ``MonitoringIdStampingVisitor`` stamping visitor.
+
+.. code-block:: python
+
+    sig_example = signature('t1')
+    sig_example.stamp(visitor=MonitoringIdStampingVisitor())
+
+    group_example = group([signature('t1'), signature('t2')])
+    group_example.stamp(visitor=MonitoringIdStampingVisitor())
+
+    chord_example = chord([signature('t1'), signature('t2')], signature('t3'))
+    chord_example.stamp(visitor=MonitoringIdStampingVisitor())
+
+    chain_example = chain(signature('t1'), group(signature('t2'), signature('t3')), signature('t4'))
+    chain_example.stamp(visitor=MonitoringIdStampingVisitor())
+
+Lastly, it's important to mention that the monitoring id stamps in the example
+above differ from task to task, since a new UUID is generated for every
+signature the visitor touches.
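+
+A hedged variation (an illustrative sketch, not a documented Celery API
+pattern): if a single shared id per canvas is wanted instead, generate the
+value once when the visitor is constructed and return the same stamp for
+every signature:
+
+.. code-block:: python
+
+    class FixedMonitoringIdStampingVisitor(StampingVisitor):
+        def __init__(self):
+            # One id, generated up front and shared by every stamped task.
+            self.monitoring_id = str(uuid4())
+
+        def on_signature(self, sig, **headers) -> dict:
+            return {'monitoring_id': self.monitoring_id,
+                    'stamped_headers': ['monitoring_id']}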
\ No newline at end of file From b547032dc4f044f391f0f8c400de84e45cb2de9a Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 7 Sep 2022 17:52:35 +0300 Subject: [PATCH 0184/1051] Improved the coverage of test_chord_stamping_body_chord() --- t/unit/tasks/test_canvas.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/t/unit/tasks/test_canvas.py b/t/unit/tasks/test_canvas.py index bc25b7408ee..f27e8b196f6 100644 --- a/t/unit/tasks/test_canvas.py +++ b/t/unit/tasks/test_canvas.py @@ -1609,7 +1609,8 @@ def test_chord_stamping_body_chord(self, subtests): self.app.conf.task_store_eager_result = True self.app.conf.result_extended = True - parent_header_tasks = [self.add.s(i, i) for i in range(10)] + parent_header_tasks = group([self.add.s(i, i) for i in range(10)]) + parent_header_tasks_res = parent_header_tasks.freeze() sum_task = self.xsum.s() sum_task_res = sum_task.freeze() @@ -1620,14 +1621,20 @@ def test_chord_stamping_body_chord(self, subtests): body = chord(group(sum_task, prod_task), sum_task2, app=self.app) - g = chord(parent_header_tasks, body, app=self.app) - g.freeze() - g.apply() + c = chord(parent_header_tasks, body, app=self.app) + c.freeze() + c.apply() + with subtests.test("parent_header_tasks are stamped", groups=[c.id]): + for ar in parent_header_tasks_res.children: + assert ar._get_task_meta()['groups'] == [c.id] + assert ar._get_task_meta()['groups'] != [body.id] with subtests.test("sum_task_res is stamped", groups=[body.id]): assert sum_task_res._get_task_meta()['groups'] == [body.id] + assert sum_task_res._get_task_meta()['groups'] != [c.id] with subtests.test("prod_task_res is stamped", groups=[body.id]): assert prod_task_res._get_task_meta()['groups'] == [body.id] + assert prod_task_res._get_task_meta()['groups'] != [c.id] with subtests.test("sum_task_res2 is NOT stamped", groups=[]): assert len(sum_task_res2._get_task_meta()['groups']) == 0 From 43fde840982300ab47719bcc85ccfdaaf18f57ce Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Sun, 18 Sep 2022 14:21:06 +0600 Subject: [PATCH 0185/1051] billiard >= 3.6.3.0,<5.0 for rpm --- setup.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index 91641248bc2..e594f5e5c2e 100644 --- a/setup.cfg +++ b/setup.cfg @@ -24,7 +24,7 @@ per-file-ignores = [bdist_rpm] requires = pytz >= 2016.7 - billiard >= 3.6.3.0,<4.0 + billiard >= 3.6.3.0,<5.0 kombu >= 5.2.1,<6.0.0 [bdist_wheel] From 777698c746e4d1aa8af0a7974b0559bf3b86b14a Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 20 Sep 2022 16:25:19 +0300 Subject: [PATCH 0186/1051] [pre-commit.ci] pre-commit autoupdate (#7625) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * [pre-commit.ci] pre-commit autoupdate updates: - [github.com/asottile/pyupgrade: v2.34.0 → v2.38.0](https://github.com/asottile/pyupgrade/compare/v2.34.0...v2.38.0) - [github.com/PyCQA/flake8: 4.0.1 → 5.0.4](https://github.com/PyCQA/flake8/compare/4.0.1...5.0.4) - [github.com/asottile/yesqa: v1.3.0 → v1.4.0](https://github.com/asottile/yesqa/compare/v1.3.0...v1.4.0) * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * autopep8 Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Omer Katz --- .pre-commit-config.yaml | 6 +++--- t/unit/app/test_app.py | 2 +- t/unit/concurrency/test_eventlet.py | 2 +- t/unit/conftest.py | 2 +- 
t/unit/tasks/test_canvas.py | 2 +- t/unit/utils/test_deprecated.py | 4 ++-- t/unit/utils/test_local.py | 8 ++++---- 7 files changed, 13 insertions(+), 13 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 1cac64fbef2..5cf9180a77c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,17 +1,17 @@ repos: - repo: https://github.com/asottile/pyupgrade - rev: v2.34.0 + rev: v2.38.0 hooks: - id: pyupgrade args: ["--py37-plus"] - repo: https://github.com/PyCQA/flake8 - rev: 4.0.1 + rev: 5.0.4 hooks: - id: flake8 - repo: https://github.com/asottile/yesqa - rev: v1.3.0 + rev: v1.4.0 hooks: - id: yesqa diff --git a/t/unit/app/test_app.py b/t/unit/app/test_app.py index 0402c3bc3fc..04fcaebf0b3 100644 --- a/t/unit/app/test_app.py +++ b/t/unit/app/test_app.py @@ -742,7 +742,7 @@ def test_get_active_apps(self): appid = id(app1) assert app1 in _state._get_active_apps() app1.close() - del(app1) + del (app1) gc.collect() diff --git a/t/unit/concurrency/test_eventlet.py b/t/unit/concurrency/test_eventlet.py index aff2d310368..b6a46d95ceb 100644 --- a/t/unit/concurrency/test_eventlet.py +++ b/t/unit/concurrency/test_eventlet.py @@ -29,7 +29,7 @@ def teardown(self): for mod in [mod for mod in sys.modules if mod.startswith('eventlet')]: try: - del(sys.modules[mod]) + del (sys.modules[mod]) except KeyError: pass diff --git a/t/unit/conftest.py b/t/unit/conftest.py index 26b0e42d9ff..e742a5c3ccc 100644 --- a/t/unit/conftest.py +++ b/t/unit/conftest.py @@ -547,7 +547,7 @@ def __getattr__(self, attr): sys.modules[name] = prev[name] except KeyError: try: - del(sys.modules[name]) + del (sys.modules[name]) except KeyError: pass diff --git a/t/unit/tasks/test_canvas.py b/t/unit/tasks/test_canvas.py index f27e8b196f6..2b9fcf946ee 100644 --- a/t/unit/tasks/test_canvas.py +++ b/t/unit/tasks/test_canvas.py @@ -18,7 +18,7 @@ 'subtask_type': ''}, ) try: - from collections import Iterable + from collections.abc import Iterable except ImportError: from collections.abc import Iterable diff --git a/t/unit/utils/test_deprecated.py b/t/unit/utils/test_deprecated.py index ed2255785d0..5b303eb274b 100644 --- a/t/unit/utils/test_deprecated.py +++ b/t/unit/utils/test_deprecated.py @@ -40,7 +40,7 @@ def foo(self): description='foo', removal=None, ) warn.reset_mock() - del(x.foo) + del (x.foo) warn.assert_called_with( stacklevel=3, deprecation='1.2', alternative=None, description='foo', removal=None, @@ -57,7 +57,7 @@ def foo(self): with pytest.raises(AttributeError): x.foo = 10 with pytest.raises(AttributeError): - del(x.foo) + del (x.foo) class test_warn: diff --git a/t/unit/utils/test_local.py b/t/unit/utils/test_local.py index 621a77595b2..ac02c075c45 100644 --- a/t/unit/utils/test_local.py +++ b/t/unit/utils/test_local.py @@ -110,7 +110,7 @@ def __dir__(self): setattr(x, 'a', 10) assert x.a == 10 - del(x.a) + del (x.a) assert x.a == 1 def test_dictproxy(self): @@ -120,7 +120,7 @@ def test_dictproxy(self): assert x['foo'] == 42 assert len(x) == 1 assert 'foo' in x - del(x['foo']) + del (x['foo']) with pytest.raises(KeyError): x['foo'] assert iter(x) @@ -132,7 +132,7 @@ def test_listproxy(self): x.extend([2, 3, 4]) assert x[0] == 1 assert x[:-1] == [1, 2, 3] - del(x[-1]) + del (x[-1]) assert x[:-1] == [1, 2] x[0] = 10 assert x[0] == 10 @@ -140,7 +140,7 @@ def test_listproxy(self): assert len(x) == 3 assert iter(x) x[0:2] = [1, 2] - del(x[0:2]) + del (x[0:2]) assert str(x) def test_complex_cast(self): From 34533ab44d2a6492004bc3df44dc04ad5c6611e7 Mon Sep 17 00:00:00 2001 From: Tomer 
Nosrati Date: Wed, 21 Sep 2022 16:49:10 +0300 Subject: [PATCH 0187/1051] Fixed memory leak with worker_cancel_long_running_tasks_on_connection_loss handling of connection error (#7771) --- celery/worker/consumer/consumer.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/celery/worker/consumer/consumer.py b/celery/worker/consumer/consumer.py index 2aeccff2111..6dd93ba7e57 100644 --- a/celery/worker/consumer/consumer.py +++ b/celery/worker/consumer/consumer.py @@ -31,7 +31,7 @@ from celery.utils.text import truncate from celery.utils.time import humanize_seconds, rate from celery.worker import loops -from celery.worker.state import active_requests, maybe_shutdown, reserved_requests, task_reserved +from celery.worker.state import active_requests, maybe_shutdown, requests, reserved_requests, task_reserved __all__ = ('Consumer', 'Evloop', 'dump_body') @@ -444,6 +444,9 @@ def on_close(self): for bucket in self.task_buckets.values(): if bucket: bucket.clear_pending() + for request_id in reserved_requests: + if request_id in requests: + del requests[request_id] reserved_requests.clear() if self.pool and self.pool.flush: self.pool.flush() From 392f7034eae52438fdb30bb2c6ec61746acb3722 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 22 Sep 2022 20:15:19 +0300 Subject: [PATCH 0188/1051] Fixed bug where a chord with header of type `tuple` was not supported in the link_error flow for task_allow_error_cb_on_chord_header flag (#7772) --- celery/canvas.py | 2 +- t/unit/tasks/test_canvas.py | 15 +++++++++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/celery/canvas.py b/celery/canvas.py index 707d93a4572..475ee3c61df 100644 --- a/celery/canvas.py +++ b/celery/canvas.py @@ -1809,7 +1809,7 @@ def link(self, callback): def link_error(self, errback): if self.app.conf.task_allow_error_cb_on_chord_header: # self.tasks can be a list of the chord header workflow. - if isinstance(self.tasks, list): + if isinstance(self.tasks, (list, tuple)): for task in self.tasks: task.link_error(errback) else: diff --git a/t/unit/tasks/test_canvas.py b/t/unit/tasks/test_canvas.py index 2b9fcf946ee..33626f097c3 100644 --- a/t/unit/tasks/test_canvas.py +++ b/t/unit/tasks/test_canvas.py @@ -2121,6 +2121,21 @@ def test_flag_allow_error_cb_on_chord_header(self, subtests): # body body.link_error.assert_has_calls([call(errback_sig), call(errback_sig)]) + @pytest.mark.usefixtures('depends_on_current_app') + def test_flag_allow_error_cb_on_chord_header_various_header_types(self): + """ Test chord link_error with various header types. 
""" + self.app.conf.task_allow_error_cb_on_chord_header = True + headers = [ + signature('t'), + [signature('t'), signature('t')], + group(signature('t'), signature('t')) + ] + for chord_header in headers: + c = chord(chord_header, signature('t')) + sig = signature('t') + errback = c.link_error(sig) + assert errback == sig + class test_maybe_signature(CanvasCase): From 7a7f48300c55d9a4a72a4916a7caa82427d1ef58 Mon Sep 17 00:00:00 2001 From: "pyup.io bot" Date: Sat, 24 Sep 2022 00:47:23 -0500 Subject: [PATCH 0189/1051] Scheduled weekly dependency update for week 38 (#7767) * Update mypy from 0.961 to 0.971 * Pin cryptography to latest version 38.0.1 * Pin elasticsearch to latest version 8.4.1 * Update pylibmc from 1.6.1 to 1.6.3 * Update pycurl from 7.43.0.5 to 7.45.1 * elasticsearch<8.0 * pycurl==7.43.0.5 Co-authored-by: Asif Saif Uddin --- requirements/extras/auth.txt | 2 +- requirements/extras/memcache.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/extras/auth.txt b/requirements/extras/auth.txt index 7973b0677a7..859fab375df 100644 --- a/requirements/extras/auth.txt +++ b/requirements/extras/auth.txt @@ -1 +1 @@ -cryptography~=37.0.1 +cryptography==38.0.1 diff --git a/requirements/extras/memcache.txt b/requirements/extras/memcache.txt index c6122cbd4a2..2d1d02f6124 100644 --- a/requirements/extras/memcache.txt +++ b/requirements/extras/memcache.txt @@ -1 +1 @@ -pylibmc==1.6.1; platform_system != "Windows" +pylibmc==1.6.3; platform_system != "Windows" diff --git a/requirements/test.txt b/requirements/test.txt index d23cbf8270c..03181ec84aa 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -6,7 +6,7 @@ pytest-click==1.1.0 boto3>=1.9.178 moto>=2.2.6 # typing extensions -mypy==0.961; platform_python_implementation=="CPython" +mypy==0.971; platform_python_implementation=="CPython" pre-commit==2.20.0 -r extras/yaml.txt -r extras/msgpack.txt From 6ea687bf50f57db1dd6fe76cda81d340c73c2901 Mon Sep 17 00:00:00 2001 From: skshetry Date: Sat, 24 Sep 2022 11:34:47 +0545 Subject: [PATCH 0190/1051] recreate_module: set __spec__ to the new module (#7773) * recreate_module: set __spec__ to the new module * test: add test for celery import --- celery/local.py | 1 + t/unit/utils/test_local.py | 14 ++++++++++++++ 2 files changed, 15 insertions(+) diff --git a/celery/local.py b/celery/local.py index c2dd8444ed9..7bbe6151de2 100644 --- a/celery/local.py +++ b/celery/local.py @@ -517,6 +517,7 @@ def recreate_module(name, compat_modules=None, by_module=None, direct=None, new_module.__dict__.update({ mod: get_compat_module(new_module, mod) for mod in compat_modules }) + new_module.__spec__ = old_module.__spec__ return old_module, new_module diff --git a/t/unit/utils/test_local.py b/t/unit/utils/test_local.py index ac02c075c45..f2c0fea0c00 100644 --- a/t/unit/utils/test_local.py +++ b/t/unit/utils/test_local.py @@ -1,3 +1,5 @@ +import sys +from importlib.util import find_spec from unittest.mock import Mock import pytest @@ -339,3 +341,15 @@ def test_maybe_evaluate(self): assert maybe_evaluate(30) == 30 assert x.__evaluated__() + + +class test_celery_import: + def test_import_celery(self, monkeypatch): + monkeypatch.delitem(sys.modules, "celery", raising=False) + spec = find_spec("celery") + assert spec + + import celery + + assert celery.__spec__ == spec + assert find_spec("celery") == spec From 4f54119b822374533934943fa41ba68011295489 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" 
<66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 26 Sep 2022 17:00:50 +0000 Subject: [PATCH 0191/1051] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/asottile/pyupgrade: v2.38.0 → v2.38.2](https://github.com/asottile/pyupgrade/compare/v2.38.0...v2.38.2) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 5cf9180a77c..a3aaf55e000 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/asottile/pyupgrade - rev: v2.38.0 + rev: v2.38.2 hooks: - id: pyupgrade args: ["--py37-plus"] From 7c1dcd03e065a681377d13e63ece7d910691174e Mon Sep 17 00:00:00 2001 From: Omer Katz Date: Wed, 28 Sep 2022 18:40:50 +0300 Subject: [PATCH 0192/1051] Override integration test config using integration-tests-config.json (#7778) * Override integration test config using integration-tests-config.json * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci Co-authored-by: Omer Katz Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .gitignore | 3 ++- t/integration/conftest.py | 14 ++++++++++++-- 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/.gitignore b/.gitignore index 4f206fdb28c..b821c3f1dd8 100644 --- a/.gitignore +++ b/.gitignore @@ -32,4 +32,5 @@ coverage.xml test.db pip-wheel-metadata/ .python-version -.vscode/ \ No newline at end of file +.vscode/ +integration-tests-config.json diff --git a/t/integration/conftest.py b/t/integration/conftest.py index 5dc6b0cae11..34a705b2be5 100644 --- a/t/integration/conftest.py +++ b/t/integration/conftest.py @@ -1,3 +1,4 @@ +import json import os import pytest @@ -30,8 +31,8 @@ def get_active_redis_channels(): @pytest.fixture(scope='session') -def celery_config(): - return { +def celery_config(request): + config = { 'broker_url': TEST_BROKER, 'result_backend': TEST_BACKEND, 'cassandra_servers': ['localhost'], @@ -41,6 +42,15 @@ def celery_config(): 'cassandra_write_consistency': 'ONE', 'result_extended': True } + try: + # To override the default configuration, create the integration-tests-config.json file + # in Celery's root directory. + # The file must contain a dictionary of valid configuration name/value pairs. 
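+        # As an illustration, a hypothetical override file could be written
+        # from Python like this (the broker URL is only an example value):
+        #
+        #   json.dump({"broker_url": "redis://localhost:6379/0"},
+        #             open("integration-tests-config.json", "w"))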
+ config_overrides = json.load(open(str(request.config.rootdir / "integration-tests-config.json"))) + config.update(config_overrides) + except OSError: + pass + return config @pytest.fixture(scope='session') From 629bc63cb516031fdbe360b69de9b60fbe3a2034 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 6 Oct 2022 09:52:21 +0300 Subject: [PATCH 0193/1051] Fixed error handling bugs due to upgrade to a newer version of billiard (#7781) * Bump Billiard to 4.0.2 * Defaults are already installed so pip reports a conflict * Fixed error handling bugs due to upgrade to a newer version of billiard Co-authored-by: Omer Katz --- celery/app/task.py | 4 +++- celery/worker/request.py | 19 ++++++++++++++----- requirements/default.txt | 2 +- t/unit/utils/test_collections.py | 4 ++-- t/unit/worker/test_request.py | 8 ++++---- tox.ini | 1 - 6 files changed, 24 insertions(+), 14 deletions(-) diff --git a/celery/app/task.py b/celery/app/task.py index 212bc772e01..d6108fbef8c 100644 --- a/celery/app/task.py +++ b/celery/app/task.py @@ -1,7 +1,7 @@ """Task implementation: request context and the task base class.""" import sys -from billiard.einfo import ExceptionInfo +from billiard.einfo import ExceptionInfo, ExceptionWithTraceback from kombu import serialization from kombu.exceptions import OperationalError from kombu.utils.uuid import uuid @@ -813,6 +813,8 @@ def apply(self, args=None, kwargs=None, retval = ret.retval if isinstance(retval, ExceptionInfo): retval, tb = retval.exception, retval.traceback + if isinstance(retval, ExceptionWithTraceback): + retval = retval.exc if isinstance(retval, Retry) and retval.sig is not None: return retval.sig.apply(retries=retries + 1) state = states.SUCCESS if ret.info is None else ret.info.state diff --git a/celery/worker/request.py b/celery/worker/request.py index d89971468c6..d0004a19ccc 100644 --- a/celery/worker/request.py +++ b/celery/worker/request.py @@ -10,6 +10,7 @@ from weakref import ref from billiard.common import TERM_SIGNAME +from billiard.einfo import ExceptionWithTraceback from kombu.utils.encoding import safe_repr, safe_str from kombu.utils.objects import cached_property @@ -511,8 +512,11 @@ def on_success(self, failed__retval__runtime, **kwargs): """Handler called if the task was successfully processed.""" failed, retval, runtime = failed__retval__runtime if failed: - if isinstance(retval.exception, (SystemExit, KeyboardInterrupt)): - raise retval.exception + exc = retval.exception + if isinstance(exc, ExceptionWithTraceback): + exc = exc.exc + if isinstance(exc, (SystemExit, KeyboardInterrupt)): + raise exc return self.on_failure(retval, return_ok=True) task_ready(self, successful=True) @@ -535,6 +539,9 @@ def on_failure(self, exc_info, send_failed_event=True, return_ok=False): task_ready(self) exc = exc_info.exception + if isinstance(exc, ExceptionWithTraceback): + exc = exc.exc + is_terminated = isinstance(exc, Terminated) if is_terminated: # If the task was terminated and the task was not cancelled due @@ -735,9 +742,11 @@ def execute_using_pool(self, pool, **kwargs): def on_success(self, failed__retval__runtime, **kwargs): failed, retval, runtime = failed__retval__runtime if failed: - if isinstance(retval.exception, ( - SystemExit, KeyboardInterrupt)): - raise retval.exception + exc = retval.exception + if isinstance(exc, ExceptionWithTraceback): + exc = exc.exc + if isinstance(exc, (SystemExit, KeyboardInterrupt)): + raise exc return self.on_failure(retval, return_ok=True) task_ready(self) diff --git a/requirements/default.txt 
b/requirements/default.txt index 5a076c8ffad..d4a2e01daeb 100644 --- a/requirements/default.txt +++ b/requirements/default.txt @@ -1,5 +1,5 @@ pytz>=2021.3 -billiard>=3.6.4.0,<5.0 +billiard>=4.0.2,<5.0 kombu>=5.3.0b1,<6.0 vine>=5.0.0,<6.0 click>=8.1.2,<9.0 diff --git a/t/unit/utils/test_collections.py b/t/unit/utils/test_collections.py index ce776cebf1a..aae685ebc7c 100644 --- a/t/unit/utils/test_collections.py +++ b/t/unit/utils/test_collections.py @@ -145,8 +145,8 @@ def test_exception_info(self): except Exception: einfo = ExceptionInfo() assert str(einfo) == einfo.traceback - assert isinstance(einfo.exception, LookupError) - assert einfo.exception.args == ('The quick brown fox jumps...',) + assert isinstance(einfo.exception.exc, LookupError) + assert einfo.exception.exc.args == ('The quick brown fox jumps...',) assert einfo.traceback assert repr(einfo) diff --git a/t/unit/worker/test_request.py b/t/unit/worker/test_request.py index a34f70dc80d..b818f2837cc 100644 --- a/t/unit/worker/test_request.py +++ b/t/unit/worker/test_request.py @@ -155,7 +155,7 @@ def test_execute_jail_failure(self): self.app, uuid(), self.mytask_raising.name, {}, [4], {}, ) assert isinstance(ret, ExceptionInfo) - assert ret.exception.args == (4,) + assert ret.exception.exc.args == (4,) def test_execute_task_ignore_result(self): @self.app.task(shared=False, ignore_result=True) @@ -385,7 +385,7 @@ def test_on_failure_WorkerLostError_redelivered_True(self): task_failure, sender=req.task, task_id=req.id, - exception=einfo.exception, + exception=einfo.exception.exc, args=req.args, kwargs=req.kwargs, traceback=einfo.traceback, @@ -394,7 +394,7 @@ def test_on_failure_WorkerLostError_redelivered_True(self): req.on_failure(einfo) req.task.backend.mark_as_failure.assert_called_once_with(req.id, - einfo.exception, + einfo.exception.exc, request=req._context, store_result=True) @@ -807,7 +807,7 @@ def test_from_message_invalid_kwargs(self): m = self.TaskMessage(self.mytask.name, args=(), kwargs='foo') req = Request(m, app=self.app) with pytest.raises(InvalidTaskError): - raise req.execute().exception + raise req.execute().exception.exc def test_on_hard_timeout_acks_late(self, patching): error = patching('celery.worker.request.error') diff --git a/tox.ini b/tox.ini index 3e4be9020c7..2820e656884 100644 --- a/tox.ini +++ b/tox.ini @@ -26,7 +26,6 @@ passenv = AZUREBLOCKBLOB_URL deps= - -r{toxinidir}/requirements/default.txt -r{toxinidir}/requirements/test.txt -r{toxinidir}/requirements/pkgutils.txt From c5a797c7f42db8ee5807c7f59777c33d75d221b2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BCrgen=20Gmach?= Date: Thu, 6 Oct 2022 09:55:45 +0200 Subject: [PATCH 0194/1051] Do not recommend using easy_install anymore (#7789) It has been deprecated since 2019: https://setuptools.pypa.io/en/latest/history.html#v42-0-0 --- docs/getting-started/first-steps-with-celery.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/getting-started/first-steps-with-celery.rst b/docs/getting-started/first-steps-with-celery.rst index 12222e5c223..2637851d3a3 100644 --- a/docs/getting-started/first-steps-with-celery.rst +++ b/docs/getting-started/first-steps-with-celery.rst @@ -106,7 +106,7 @@ Installing Celery ================= Celery is on the Python Package Index (PyPI), so it can be installed -with standard Python tools like ``pip`` or ``easy_install``: +with standard Python tools like ``pip``: .. 
code-block:: console From 88a031634b03210bffa417b41ec0bd8cf0876ba0 Mon Sep 17 00:00:00 2001 From: Alex Date: Thu, 6 Oct 2022 11:32:42 +0300 Subject: [PATCH 0195/1051] GitHub Workflows security hardening (#7768) * build: harden python-package.yml permissions Signed-off-by: Alex * build: harden post_release_to_hacker_news.yml permissions Signed-off-by: Alex --- .github/workflows/post_release_to_hacker_news.yml | 1 + .github/workflows/python-package.yml | 3 +++ 2 files changed, 4 insertions(+) diff --git a/.github/workflows/post_release_to_hacker_news.yml b/.github/workflows/post_release_to_hacker_news.yml index dddbb3c52af..c21287558bd 100644 --- a/.github/workflows/post_release_to_hacker_news.yml +++ b/.github/workflows/post_release_to_hacker_news.yml @@ -2,6 +2,7 @@ on: release: types: [released] +permissions: {} jobs: post_release_to_hacker_news: runs-on: ubuntu-latest diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 11def86b454..df76966793a 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -19,6 +19,9 @@ on: - '**.toml' - '.github/workflows/python-package.yml' +permissions: + contents: read # to fetch code (actions/checkout) + jobs: Unit: From 42902edbe300e72d186b3ef0a5cf32d70f5fbeb5 Mon Sep 17 00:00:00 2001 From: Zhong Zheng Date: Fri, 26 Aug 2022 18:38:22 +1000 Subject: [PATCH 0196/1051] update ambiguous acks_late doc --- celery/app/task.py | 2 +- docs/history/changelog-1.0.rst | 2 +- docs/userguide/configuration.rst | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/celery/app/task.py b/celery/app/task.py index d6108fbef8c..e3c0fcf0ac1 100644 --- a/celery/app/task.py +++ b/celery/app/task.py @@ -252,7 +252,7 @@ class Task: track_started = None #: When enabled messages for this task will be acknowledged **after** - #: the task has been executed, and not *just before* (the + #: the task has been executed, and not *right before* (the #: default behavior). #: #: Please note that this means the task may be executed twice if the diff --git a/docs/history/changelog-1.0.rst b/docs/history/changelog-1.0.rst index 3ff2053ab9a..3579727f89f 100644 --- a/docs/history/changelog-1.0.rst +++ b/docs/history/changelog-1.0.rst @@ -164,7 +164,7 @@ News * New task option: `Task.acks_late` (default: :setting:`CELERY_ACKS_LATE`) Late ack means the task messages will be acknowledged **after** the task - has been executed, not *just before*, which is the default behavior. + has been executed, not *right before*, which is the default behavior. .. note:: diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst index b798aaa4ce6..3fa48f70233 100644 --- a/docs/userguide/configuration.rst +++ b/docs/userguide/configuration.rst @@ -595,7 +595,7 @@ clean up before the hard time limit comes: Default: Disabled. Late ack means the task messages will be acknowledged **after** the task -has been executed, not *just before* (the default behavior). +has been executed, not *right before* (the default behavior). .. 
seealso:: From 74208af1092d13479472e5bc8b2419b7c1c0040e Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 10 Oct 2022 17:04:30 +0000 Subject: [PATCH 0197/1051] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/asottile/pyupgrade: v2.38.2 → v3.0.0](https://github.com/asottile/pyupgrade/compare/v2.38.2...v3.0.0) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a3aaf55e000..7a5fe2c0c88 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/asottile/pyupgrade - rev: v2.38.2 + rev: v3.0.0 hooks: - id: pyupgrade args: ["--py37-plus"] From b0127577c692371ea1fc9b3f39137c7c3da76c84 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Wed, 12 Oct 2022 12:34:54 +0600 Subject: [PATCH 0198/1051] billiard >=4.0.2,<5.0 (#7720) * billiard >= 3.6.3.0,<5.0 * billiard>=4.0.2,<5.0 * billiard >=4.0.2,<5.0 * billiard >=4.0.2,<5.0 --- setup.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index e594f5e5c2e..465f266dba7 100644 --- a/setup.cfg +++ b/setup.cfg @@ -24,7 +24,7 @@ per-file-ignores = [bdist_rpm] requires = pytz >= 2016.7 - billiard >= 3.6.3.0,<5.0 + billiard >=4.0.2,<5.0 kombu >= 5.2.1,<6.0.0 [bdist_wheel] From a4545dbd549c7666bec561376ec75e484da8b475 Mon Sep 17 00:00:00 2001 From: woutdenolf Date: Wed, 12 Oct 2022 08:38:51 +0200 Subject: [PATCH 0199/1051] importlib_metadata remove deprecated entry point interfaces (#7785) * importlib_metadata removed deprecated entry point interfaces * importlib-metadata usage requires 3.6 --- celery/bin/celery.py | 12 +++++++++++- celery/utils/imports.py | 9 ++++++++- requirements/default.txt | 2 +- 3 files changed, 20 insertions(+), 3 deletions(-) diff --git a/celery/bin/celery.py b/celery/bin/celery.py index 2aee6414be4..65f53f37390 100644 --- a/celery/bin/celery.py +++ b/celery/bin/celery.py @@ -1,6 +1,7 @@ """Celery Command Line Interface.""" import os import pathlib +import sys import traceback try: @@ -75,7 +76,16 @@ def convert(self, value, param, ctx): APP = App() -@with_plugins(entry_points().get('celery.commands', [])) +if sys.version_info >= (3, 10): + _PLUGINS = entry_points(group='celery.commands') +else: + try: + _PLUGINS = entry_points().get('celery.commands', []) + except AttributeError: + _PLUGINS = entry_points().select(group='celery.commands') + + +@with_plugins(_PLUGINS) @click.group(cls=DYMGroup, invoke_without_command=True) @click.option('-A', '--app', diff --git a/celery/utils/imports.py b/celery/utils/imports.py index 60f11e8316f..390b22ce894 100644 --- a/celery/utils/imports.py +++ b/celery/utils/imports.py @@ -141,7 +141,14 @@ def gen_task_name(app, name, module_name): def load_extension_class_names(namespace): - for ep in entry_points().get(namespace, []): + if sys.version_info >= (3, 10): + _entry_points = entry_points(group=namespace) + else: + try: + _entry_points = entry_points().get(namespace, []) + except AttributeError: + _entry_points = entry_points().select(group=namespace) + for ep in _entry_points: yield ep.name, ep.value diff --git a/requirements/default.txt b/requirements/default.txt index d4a2e01daeb..ba30d7d31e8 100644 --- a/requirements/default.txt +++ b/requirements/default.txt @@ -6,4 +6,4 @@ click>=8.1.2,<9.0 click-didyoumean>=0.3.0 click-repl>=0.2.0 
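The version dispatch used in ``celery/bin/celery.py`` and
``celery/utils/imports.py`` above can also be condensed into a single feature
probe; a minimal standalone sketch, where the ``hasattr`` check is an
assumption standing in for the explicit version checks:

.. code-block:: python

    from importlib.metadata import entry_points

    eps = entry_points()
    if hasattr(eps, 'select'):
        # importlib.metadata >= 3.6 and Python >= 3.10 expose a selectable API.
        commands = list(eps.select(group='celery.commands'))
    else:
        # Older releases return a mapping of group name to entry points.
        commands = list(eps.get('celery.commands', []))
    print(commands)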
click-plugins>=1.1.1 -importlib-metadata>=1.4.0; python_version < '3.8' +importlib-metadata>=3.6; python_version < '3.8' From df7ffc46638c7ee9305a64433b8a1f1ff4e48b66 Mon Sep 17 00:00:00 2001 From: "pyup.io bot" Date: Wed, 12 Oct 2022 01:54:55 -0500 Subject: [PATCH 0200/1051] Scheduled weekly dependency update for week 41 (#7798) * Update mypy from 0.971 to 0.982 * Pin elasticsearch to latest version 8.4.3 * Update pytest-cov from 3.0.0 to 4.0.0 * Update pycurl from 7.43.0.5 to 7.45.1 * elasticsearch<8.0 * pycurl==7.43.0.5 Co-authored-by: Asif Saif Uddin --- requirements/test-ci-base.txt | 2 +- requirements/test.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/test-ci-base.txt b/requirements/test-ci-base.txt index efe082c33e5..59f10491d65 100644 --- a/requirements/test-ci-base.txt +++ b/requirements/test-ci-base.txt @@ -1,4 +1,4 @@ -pytest-cov==3.0.0 +pytest-cov==4.0.0 pytest-github-actions-annotate-failures==0.1.7 codecov==2.1.12 -r extras/redis.txt diff --git a/requirements/test.txt b/requirements/test.txt index 03181ec84aa..d2d87888e41 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -6,7 +6,7 @@ pytest-click==1.1.0 boto3>=1.9.178 moto>=2.2.6 # typing extensions -mypy==0.971; platform_python_implementation=="CPython" +mypy==0.982; platform_python_implementation=="CPython" pre-commit==2.20.0 -r extras/yaml.txt -r extras/msgpack.txt From 64f7e8917424d1142a63a3a84d958481b88a1617 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Wed, 12 Oct 2022 13:04:06 +0600 Subject: [PATCH 0201/1051] pyzmq>=22.3.0 (#7497) --- requirements/extras/zeromq.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/zeromq.txt b/requirements/extras/zeromq.txt index d34ee102466..3b730d16946 100644 --- a/requirements/extras/zeromq.txt +++ b/requirements/extras/zeromq.txt @@ -1 +1 @@ -pyzmq>=13.1.0 +pyzmq>=22.3.0 From bc312e5031741a66c3126acf8b4673432eda73f7 Mon Sep 17 00:00:00 2001 From: Marcelo Trylesinski Date: Fri, 14 Oct 2022 16:10:04 +0200 Subject: [PATCH 0202/1051] Update backends.py --- celery/app/backends.py | 1 - 1 file changed, 1 deletion(-) diff --git a/celery/app/backends.py b/celery/app/backends.py index ab40ccaed9f..5481528f0c8 100644 --- a/celery/app/backends.py +++ b/celery/app/backends.py @@ -13,7 +13,6 @@ """ BACKEND_ALIASES = { - 'amqp': 'celery.backends.amqp:AMQPBackend', 'rpc': 'celery.backends.rpc.RPCBackend', 'cache': 'celery.backends.cache:CacheBackend', 'redis': 'celery.backends.redis:RedisBackend', From 651095e3602756237920f4fa7ac170e1322c1939 Mon Sep 17 00:00:00 2001 From: Marcelo Trylesinski Date: Sat, 15 Oct 2022 19:38:25 +0200 Subject: [PATCH 0203/1051] Replace print by logger.debug --- celery/utils/functional.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/celery/utils/functional.py b/celery/utils/functional.py index dc40ceb44f9..5fb0d6339e5 100644 --- a/celery/utils/functional.py +++ b/celery/utils/functional.py @@ -1,13 +1,17 @@ """Functional-style utilities.""" import inspect -import sys from collections import UserList from functools import partial from itertools import islice, tee, zip_longest +from typing import Any, Callable from kombu.utils.functional import LRUCache, dictfilter, is_list, lazy, maybe_evaluate, maybe_list, memoize from vine import promise +from celery.utils.log import get_logger + +logger = get_logger(__name__) + __all__ = ( 'LRUCache', 'is_list', 'maybe_list', 'memoize', 'mlazy', 'noop', 'first', 'firstmethod', 'chunks', 'padlist', 
'mattrgetter', 'uniq', @@ -307,7 +311,7 @@ def _argsfromspec(spec, replace_defaults=True): ])) -def head_from_fun(fun, bound=False, debug=False): +def head_from_fun(fun: Callable[..., Any], bound: bool = False) -> str: """Generate signature function from actual function.""" # we could use inspect.Signature here, but that implementation # is very slow since it implements the argument checking @@ -328,8 +332,7 @@ def head_from_fun(fun, bound=False, debug=False): fun_args=_argsfromspec(inspect.getfullargspec(fun)), fun_value=1, ) - if debug: # pragma: no cover - print(definition, file=sys.stderr) + logger.debug(definition) namespace = {'__name__': fun.__module__} # pylint: disable=exec-used # Tasks are rarely, if ever, created at runtime - exec here is fine. From 213bd38ff719c33dd6315026fcd106178ca81dc8 Mon Sep 17 00:00:00 2001 From: Marcelo Trylesinski Date: Sun, 16 Oct 2022 14:38:10 +0200 Subject: [PATCH 0204/1051] Ignore coverage on `except ImportError` --- celery/backends/cassandra.py | 2 +- celery/backends/cosmosdbsql.py | 2 +- celery/backends/database/__init__.py | 2 +- celery/backends/dynamodb.py | 2 +- celery/backends/elasticsearch.py | 2 +- celery/backends/mongodb.py | 4 ++-- celery/backends/redis.py | 2 +- celery/bootsteps.py | 2 +- celery/concurrency/asynpool.py | 2 +- celery/concurrency/gevent.py | 2 +- celery/platforms.py | 2 +- celery/utils/log.py | 6 +++--- celery/utils/threads.py | 4 ++-- celery/worker/components.py | 2 +- celery/worker/worker.py | 2 +- pyproject.toml | 3 ++- t/unit/tasks/test_tasks.py | 2 +- t/unit/utils/test_platforms.py | 2 +- 18 files changed, 23 insertions(+), 22 deletions(-) diff --git a/celery/backends/cassandra.py b/celery/backends/cassandra.py index c80aa5ca040..0eb37f31ba8 100644 --- a/celery/backends/cassandra.py +++ b/celery/backends/cassandra.py @@ -12,7 +12,7 @@ import cassandra.auth import cassandra.cluster import cassandra.query -except ImportError: # pragma: no cover +except ImportError: cassandra = None diff --git a/celery/backends/cosmosdbsql.py b/celery/backends/cosmosdbsql.py index cfe560697a9..e32b13f2e78 100644 --- a/celery/backends/cosmosdbsql.py +++ b/celery/backends/cosmosdbsql.py @@ -14,7 +14,7 @@ from pydocumentdb.documents import ConnectionPolicy, ConsistencyLevel, PartitionKind from pydocumentdb.errors import HTTPFailure from pydocumentdb.retry_options import RetryOptions -except ImportError: # pragma: no cover +except ImportError: pydocumentdb = DocumentClient = ConsistencyLevel = PartitionKind = \ HTTPFailure = ConnectionPolicy = RetryOptions = None diff --git a/celery/backends/database/__init__.py b/celery/backends/database/__init__.py index fb26d552cf9..91080adc46a 100644 --- a/celery/backends/database/__init__.py +++ b/celery/backends/database/__init__.py @@ -15,7 +15,7 @@ try: from sqlalchemy.exc import DatabaseError, InvalidRequestError from sqlalchemy.orm.exc import StaleDataError -except ImportError: # pragma: no cover +except ImportError: raise ImproperlyConfigured( 'The database result backend requires SQLAlchemy to be installed.' 
'See https://pypi.org/project/SQLAlchemy/') diff --git a/celery/backends/dynamodb.py b/celery/backends/dynamodb.py index fbc8bcf160e..90fbae09449 100644 --- a/celery/backends/dynamodb.py +++ b/celery/backends/dynamodb.py @@ -12,7 +12,7 @@ try: import boto3 from botocore.exceptions import ClientError -except ImportError: # pragma: no cover +except ImportError: boto3 = ClientError = None __all__ = ('DynamoDBBackend',) diff --git a/celery/backends/elasticsearch.py b/celery/backends/elasticsearch.py index c40b15ddec8..544812979c5 100644 --- a/celery/backends/elasticsearch.py +++ b/celery/backends/elasticsearch.py @@ -11,7 +11,7 @@ try: import elasticsearch -except ImportError: # pragma: no cover +except ImportError: elasticsearch = None __all__ = ('ElasticsearchBackend',) diff --git a/celery/backends/mongodb.py b/celery/backends/mongodb.py index 1833561f530..21f5c89afc6 100644 --- a/celery/backends/mongodb.py +++ b/celery/backends/mongodb.py @@ -12,13 +12,13 @@ try: import pymongo -except ImportError: # pragma: no cover +except ImportError: pymongo = None if pymongo: try: from bson.binary import Binary - except ImportError: # pragma: no cover + except ImportError: from pymongo.binary import Binary from pymongo.errors import InvalidDocument else: # pragma: no cover diff --git a/celery/backends/redis.py b/celery/backends/redis.py index 97e186ec7f7..8acc60831bf 100644 --- a/celery/backends/redis.py +++ b/celery/backends/redis.py @@ -24,7 +24,7 @@ try: import redis.connection from kombu.transport.redis import get_redis_error_classes -except ImportError: # pragma: no cover +except ImportError: redis = None get_redis_error_classes = None diff --git a/celery/bootsteps.py b/celery/bootsteps.py index 315426ace31..878560624d1 100644 --- a/celery/bootsteps.py +++ b/celery/bootsteps.py @@ -13,7 +13,7 @@ try: from greenlet import GreenletExit -except ImportError: # pragma: no cover +except ImportError: IGNORE_ERRORS = () else: IGNORE_ERRORS = (GreenletExit,) diff --git a/celery/concurrency/asynpool.py b/celery/concurrency/asynpool.py index b8087ad3e3c..19715005828 100644 --- a/celery/concurrency/asynpool.py +++ b/celery/concurrency/asynpool.py @@ -47,7 +47,7 @@ from _billiard import read as __read__ readcanbuf = True -except ImportError: # pragma: no cover +except ImportError: def __read__(fd, buf, size, read=os.read): chunk = read(fd, size) diff --git a/celery/concurrency/gevent.py b/celery/concurrency/gevent.py index 33a61bf6198..b0ea7e663f3 100644 --- a/celery/concurrency/gevent.py +++ b/celery/concurrency/gevent.py @@ -7,7 +7,7 @@ try: from gevent import Timeout -except ImportError: # pragma: no cover +except ImportError: Timeout = None __all__ = ('TaskPool',) diff --git a/celery/platforms.py b/celery/platforms.py index 8af1876fde6..abefb459525 100644 --- a/celery/platforms.py +++ b/celery/platforms.py @@ -27,7 +27,7 @@ try: from billiard.process import current_process -except ImportError: # pragma: no cover +except ImportError: current_process = None _setproctitle = try_import('setproctitle') diff --git a/celery/utils/log.py b/celery/utils/log.py index 668094c5ce5..1765a611f45 100644 --- a/celery/utils/log.py +++ b/celery/utils/log.py @@ -264,7 +264,7 @@ def get_multiprocessing_logger(): """Return the multiprocessing logger.""" try: from billiard import util - except ImportError: # pragma: no cover + except ImportError: pass else: return util.get_logger() @@ -274,7 +274,7 @@ def reset_multiprocessing_logger(): """Reset multiprocessing logging setup.""" try: from billiard import util - except 
ImportError: # pragma: no cover + except ImportError: pass else: if hasattr(util, '_logger'): # pragma: no cover @@ -284,7 +284,7 @@ def reset_multiprocessing_logger(): def current_process(): try: from billiard import process - except ImportError: # pragma: no cover + except ImportError: pass else: return process.current_process() diff --git a/celery/utils/threads.py b/celery/utils/threads.py index 94c6f617c40..d78461a9b72 100644 --- a/celery/utils/threads.py +++ b/celery/utils/threads.py @@ -11,13 +11,13 @@ try: from greenlet import getcurrent as get_ident -except ImportError: # pragma: no cover +except ImportError: try: from _thread import get_ident except ImportError: try: from thread import get_ident - except ImportError: # pragma: no cover + except ImportError: try: from _dummy_thread import get_ident except ImportError: diff --git a/celery/worker/components.py b/celery/worker/components.py index d033872d5ce..f062affb61f 100644 --- a/celery/worker/components.py +++ b/celery/worker/components.py @@ -89,7 +89,7 @@ def _patch_thread_primitives(self, w): # multiprocessing's ApplyResult uses this lock. try: from billiard import pool - except ImportError: # pragma: no cover + except ImportError: pass else: pool.Lock = DummyLock diff --git a/celery/worker/worker.py b/celery/worker/worker.py index c0640120613..04f8c30e10d 100644 --- a/celery/worker/worker.py +++ b/celery/worker/worker.py @@ -36,7 +36,7 @@ try: import resource -except ImportError: # pragma: no cover +except ImportError: resource = None diff --git a/pyproject.toml b/pyproject.toml index d637cb79f1a..cc090fe3b63 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,5 +22,6 @@ files = [ [tool.coverage.report] exclude_lines = [ "pragma: no cover", - "if TYPE_CHECKING:" + "if TYPE_CHECKING:", + "except ImportError:" ] diff --git a/t/unit/tasks/test_tasks.py b/t/unit/tasks/test_tasks.py index e23bc4a091f..2300d423976 100644 --- a/t/unit/tasks/test_tasks.py +++ b/t/unit/tasks/test_tasks.py @@ -16,7 +16,7 @@ try: from urllib.error import HTTPError -except ImportError: # pragma: no cover +except ImportError: from urllib2 import HTTPError diff --git a/t/unit/utils/test_platforms.py b/t/unit/utils/test_platforms.py index b3c6cf572bf..8ca7c5f845d 100644 --- a/t/unit/utils/test_platforms.py +++ b/t/unit/utils/test_platforms.py @@ -21,7 +21,7 @@ try: import resource -except ImportError: # pragma: no cover +except ImportError: resource = None From a36800084b2112208c446c3bc7b05bdcbed3bb23 Mon Sep 17 00:00:00 2001 From: Marcelo Trylesinski Date: Sun, 16 Oct 2022 11:33:36 +0200 Subject: [PATCH 0205/1051] Add mongodb dependencies to test.txt --- requirements/test.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements/test.txt b/requirements/test.txt index d2d87888e41..9fde7200688 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -10,3 +10,4 @@ mypy==0.982; platform_python_implementation=="CPython" pre-commit==2.20.0 -r extras/yaml.txt -r extras/msgpack.txt +-r extras/mongodb.txt From 13d545b2155ebe9ee0ffad9e9d9ffc09a39185df Mon Sep 17 00:00:00 2001 From: Marcelo Trylesinski Date: Sun, 16 Oct 2022 22:42:21 +0200 Subject: [PATCH 0206/1051] Fix grammar typos on the whole project --- Changelog.rst | 2 +- celery/beat.py | 2 +- celery/canvas.py | 2 +- celery/platforms.py | 2 +- celery/schedules.py | 2 +- docs/history/whatsnew-4.0.rst | 4 ++-- extra/WindowsCMD-AzureWebJob/Celery/run.cmd | 4 ++-- extra/WindowsCMD-AzureWebJob/CeleryBeat/run.cmd | 6 +++--- extra/supervisord/supervisord.conf | 2 +- 
t/integration/test_inspect.py | 2 +- t/unit/app/test_app.py | 4 ++-- t/unit/events/test_state.py | 2 +- 12 files changed, 17 insertions(+), 17 deletions(-) diff --git a/Changelog.rst b/Changelog.rst index 21cdff3978b..fd5634db95e 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -50,7 +50,7 @@ an overview of what's new in Celery 5.2. - fix #7200 uid and gid. - Remove exception-throwing from the signal handler. - Add mypy to the pipeline (#7383). -- Expose more debugging information when receiving unkown tasks. (#7405) +- Expose more debugging information when receiving unknown tasks. (#7405) - Avoid importing buf_t from billiard's compat module as it was removed. - Avoid negating a constant in a loop. (#7443) - Ensure expiration is of float type when migrating tasks (#7385). diff --git a/celery/beat.py b/celery/beat.py index b8f9be23a38..4c9486532e3 100644 --- a/celery/beat.py +++ b/celery/beat.py @@ -293,7 +293,7 @@ def is_due(self, entry): return entry.is_due() def _when(self, entry, next_time_to_run, mktime=timegm): - """Return a utc timestamp, make sure heapq in currect order.""" + """Return a utc timestamp, make sure heapq in correct order.""" adjust = self.adjust as_now = maybe_make_aware(entry.default_now()) diff --git a/celery/canvas.py b/celery/canvas.py index 475ee3c61df..a2aedd6334c 100644 --- a/celery/canvas.py +++ b/celery/canvas.py @@ -230,7 +230,7 @@ class Signature(dict): >>> add.s(1, kw=2) - the ``.s()`` shortcut does not allow you to specify execution options - but there's a chaning `.set` method that returns the signature: + but there's a chaining `.set` method that returns the signature: .. code-block:: pycon diff --git a/celery/platforms.py b/celery/platforms.py index abefb459525..d06bbb24f4e 100644 --- a/celery/platforms.py +++ b/celery/platforms.py @@ -186,7 +186,7 @@ def remove(self): def remove_if_stale(self): """Remove the lock if the process isn't running. - I.e. process does not respons to signal. + I.e. process does not respond to signal. """ try: pid = self.read_pid() diff --git a/celery/schedules.py b/celery/schedules.py index ac571fe9d3e..62940132098 100644 --- a/celery/schedules.py +++ b/celery/schedules.py @@ -440,7 +440,7 @@ def _expand_cronspec(cronspec, max_, min_=0): else: raise TypeError(CRON_INVALID_TYPE.format(type=type(cronspec))) - # assure the result does not preceed the min or exceed the max + # assure the result does not precede the min or exceed the max for number in result: if number >= max_ + min_ or number < min_: raise ValueError(CRON_PATTERN_INVALID.format( diff --git a/docs/history/whatsnew-4.0.rst b/docs/history/whatsnew-4.0.rst index 9a80cd6101d..0e1ba1fa278 100644 --- a/docs/history/whatsnew-4.0.rst +++ b/docs/history/whatsnew-4.0.rst @@ -280,7 +280,7 @@ Features removed for simplicity This was an experimental feature, so not covered by our deprecation timeline guarantee. - You can copy and pase the existing batches code for use within your projects: + You can copy and pass the existing batches code for use within your projects: https://github.com/celery/celery/blob/3.1/celery/contrib/batches.py Features removed for lack of funding @@ -1395,7 +1395,7 @@ New Elasticsearch result backend introduced See :ref:`conf-elasticsearch-result-backend` for more information. -To depend on Celery with Elasticsearch as the result bakend use: +To depend on Celery with Elasticsearch as the result backend use: .. 
code-block:: console

diff --git a/extra/WindowsCMD-AzureWebJob/Celery/run.cmd b/extra/WindowsCMD-AzureWebJob/Celery/run.cmd
index 9d15f72cd99..b7c830fbdb3 100644
--- a/extra/WindowsCMD-AzureWebJob/Celery/run.cmd
+++ b/extra/WindowsCMD-AzureWebJob/Celery/run.cmd
@@ -21,11 +21,11 @@ set CELERYD_PID_FILE=%PATH_TO_PROJECT%\log\celery.pid
 set CELERYD_LOG_FILE=%PATH_TO_PROJECT%\log\celery.log
 set CELERYD_LOG_LEVEL=INFO

-rem You might need to change th path of the Python runing
+rem You might need to change the path of the running Python
 set PYTHONPATH=%PYTHONPATH%;%PATH_TO_PROJECT%;

 cd %PATH_TO_PROJECT%
 del %CELERYD_PID_FILE%
 del %CELERYD_LOG_FILE%

-%CELERY_BIN% -A %CELERY_APP% worker --loglevel=%CELERYD_LOG_LEVEL% -P eventlet
\ No newline at end of file
+%CELERY_BIN% -A %CELERY_APP% worker --loglevel=%CELERYD_LOG_LEVEL% -P eventlet
diff --git a/extra/WindowsCMD-AzureWebJob/CeleryBeat/run.cmd b/extra/WindowsCMD-AzureWebJob/CeleryBeat/run.cmd
index 7aaa873c15b..6a85b9273ea 100644
--- a/extra/WindowsCMD-AzureWebJob/CeleryBeat/run.cmd
+++ b/extra/WindowsCMD-AzureWebJob/CeleryBeat/run.cmd
@@ -25,15 +25,15 @@ set CELERYD_PID_FILE=%PATH_TO_PROJECT%\log\celerybeat.pid
 set CELERYD_LOG_FILE=%PATH_TO_PROJECT%\log\celerybeat.log
 set CELERYD_LOG_LEVEL=INFO

-rem CONFIG RELATED TO THE BEAT 
+rem CONFIG RELATED TO THE BEAT
 set CELERYD_DATABASE=django
 set CELERYD_SCHEDULER=django_celery_beat.schedulers:DatabaseScheduler

-rem You might need to change th path of the Python runing
+rem You might need to change the path of the running Python
 set PYTHONPATH=%PYTHONPATH%;%PATH_TO_PROJECT%;

 cd %PATH_TO_PROJECT%
 del %CELERYD_PID_FILE%
 del %CELERYD_LOG_FILE%

-%CELERY_BIN% -A %CELERY_APP% beat -S %CELERYD_DATABASE% --logfile=%CELERYD_LOG_FILE% --pidfile=%CELERYD_PID_FILE% --scheduler %CELERYD_SCHEDULER% --loglevel=%CELERYD_LOG_LEVEL%
\ No newline at end of file
+%CELERY_BIN% -A %CELERY_APP% beat -S %CELERYD_DATABASE% --logfile=%CELERYD_LOG_FILE% --pidfile=%CELERYD_PID_FILE% --scheduler %CELERYD_SCHEDULER% --loglevel=%CELERYD_LOG_LEVEL%
diff --git a/extra/supervisord/supervisord.conf b/extra/supervisord/supervisord.conf
index 1bde65a7846..ec81f42cfc9 100644
--- a/extra/supervisord/supervisord.conf
+++ b/extra/supervisord/supervisord.conf
@@ -18,7 +18,7 @@ childlogdir=/var/log/supervisord/            ; where child log files will live
 supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface

 [supervisorctl]
-serverurl=unix:///tmp/supervisor.sock ; use unix:// schem for a unix sockets.
+serverurl=unix:///tmp/supervisor.sock ; use unix:// scheme for unix sockets.
 [include]

diff --git a/t/integration/test_inspect.py b/t/integration/test_inspect.py
index 35b9fead9e1..501cf178d36 100644
--- a/t/integration/test_inspect.py
+++ b/t/integration/test_inspect.py
@@ -26,7 +26,7 @@ def inspect(manager):
 
 
 class test_Inspect:
-    """Integration tests fo app.control.inspect() API"""
+    """Integration tests for app.control.inspect() API"""
 
     @flaky
     def test_ping(self, inspect):
diff --git a/t/unit/app/test_app.py b/t/unit/app/test_app.py
index 04fcaebf0b3..844934b71b1 100644
--- a/t/unit/app/test_app.py
+++ b/t/unit/app/test_app.py
@@ -1023,7 +1023,7 @@ def test_thread_oid(self):
         assert oid1 == oid2
 
     def test_backend(self):
-        # Test that app.bakend returns the same backend in single thread
+        # Test that app.backend returns the same backend in single thread
         backend1 = self.app.backend
         backend2 = self.app.backend
         assert isinstance(backend1, Backend)
@@ -1031,7 +1031,7 @@ def test_backend(self):
         assert backend1 is backend2
 
     def test_thread_backend(self):
-        # Test that app.bakend returns the new backend for each thread
+        # Test that app.backend returns the new backend for each thread
         main_backend = self.app.backend
         from concurrent.futures import ThreadPoolExecutor
         with ThreadPoolExecutor(max_workers=1) as executor:
diff --git a/t/unit/events/test_state.py b/t/unit/events/test_state.py
index 9522d32cfa9..07582d15150 100644
--- a/t/unit/events/test_state.py
+++ b/t/unit/events/test_state.py
@@ -126,7 +126,7 @@ def setup(self):
         QTEV('succeeded', tB, 'w2', name='tB', clock=offset + 9),
         QTEV('started', tC, 'w2', name='tC', clock=offset + 10),
         QTEV('received', tA, 'w3', name='tA', clock=offset + 13),
-        QTEV('succeded', tC, 'w2', name='tC', clock=offset + 12),
+        QTEV('succeeded', tC, 'w2', name='tC', clock=offset + 12),
         QTEV('started', tA, 'w3', name='tA', clock=offset + 14),
         QTEV('succeeded', tA, 'w3', name='TA', clock=offset + 16),
     ]

From 8b84b46f1bb56ce7e90768c9137fe0552e0c0ad8 Mon Sep 17 00:00:00 2001
From: Marcelo Trylesinski
Date: Sun, 16 Oct 2022 20:37:19 +0200
Subject: [PATCH 0207/1051] Remove isatty wrapper function

---
 celery/app/log.py | 3 +--
 celery/apps/worker.py | 4 ++--
 celery/platforms.py | 10 +---------
 celery/utils/term.py | 4 +---
 t/unit/utils/test_platforms.py | 13 +++----------
 5 files changed, 8 insertions(+), 26 deletions(-)

diff --git a/celery/app/log.py b/celery/app/log.py
index a4db1057791..4c807f4e349 100644
--- a/celery/app/log.py
+++ b/celery/app/log.py
@@ -18,7 +18,6 @@
 from celery._state import get_current_task
 from celery.exceptions import CDeprecationWarning, CPendingDeprecationWarning
 from celery.local import class_property
-from celery.platforms import isatty
 from celery.utils.log import (ColorFormatter, LoggingProxy, get_logger,
                               get_multiprocessing_logger, mlevel, reset_multiprocessing_logger)
 from celery.utils.nodenames import node_format
@@ -204,7 +203,7 @@ def supports_color(self, colorize=None, logfile=None):
             if colorize or colorize is None:
                 # Only use color if there's no active log file
                 # and stderr is an actual terminal.
- return logfile is None and isatty(sys.stderr) + return logfile is None and sys.stderr.isatty() return colorize def colored(self, logfile=None, enabled=None): diff --git a/celery/apps/worker.py b/celery/apps/worker.py index 084f0b836f2..dcc04dac25b 100644 --- a/celery/apps/worker.py +++ b/celery/apps/worker.py @@ -20,7 +20,7 @@ from celery import VERSION_BANNER, platforms, signals from celery.app import trace from celery.loaders.app import AppLoader -from celery.platforms import EX_FAILURE, EX_OK, check_privileges, isatty +from celery.platforms import EX_FAILURE, EX_OK, check_privileges from celery.utils import static, term from celery.utils.debug import cry from celery.utils.imports import qualname @@ -106,7 +106,7 @@ def on_after_init(self, purge=False, no_color=None, super().setup_defaults(**kwargs) self.purge = purge self.no_color = no_color - self._isatty = isatty(sys.stdout) + self._isatty = sys.stdout.isatty() self.colored = self.app.log.colored( self.logfile, enabled=not no_color if no_color is not None else no_color diff --git a/celery/platforms.py b/celery/platforms.py index d06bbb24f4e..5690c72caa9 100644 --- a/celery/platforms.py +++ b/celery/platforms.py @@ -43,7 +43,7 @@ 'DaemonContext', 'detached', 'parse_uid', 'parse_gid', 'setgroups', 'initgroups', 'setgid', 'setuid', 'maybe_drop_privileges', 'signals', 'signal_name', 'set_process_title', 'set_mp_process_title', - 'get_errno_name', 'ignore_errno', 'fd_by_path', 'isatty', + 'get_errno_name', 'ignore_errno', 'fd_by_path', ) # exitcodes @@ -98,14 +98,6 @@ SIGMAP = {getattr(_signal, name): name for name in SIGNAMES} -def isatty(fh): - """Return true if the process has a controlling terminal.""" - try: - return fh.isatty() - except AttributeError: - pass - - def pyimplementation(): """Return string identifying the current Python implementation.""" if hasattr(_platform, 'python_implementation'): diff --git a/celery/utils/term.py b/celery/utils/term.py index 01c60adde1f..d7ab5cae625 100644 --- a/celery/utils/term.py +++ b/celery/utils/term.py @@ -6,8 +6,6 @@ import sys from functools import reduce -from celery.platforms import isatty - __all__ = ('colored',) BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(8) @@ -164,7 +162,7 @@ def __add__(self, other): def supports_images(): - return isatty(sys.stdin) and ITERM_PROFILE + return sys.stdin.isatty() and ITERM_PROFILE def _read_as_base64(path): diff --git a/t/unit/utils/test_platforms.py b/t/unit/utils/test_platforms.py index 8ca7c5f845d..ab1a9436543 100644 --- a/t/unit/utils/test_platforms.py +++ b/t/unit/utils/test_platforms.py @@ -13,9 +13,9 @@ from celery.exceptions import SecurityError, SecurityWarning from celery.platforms import (ASSUMING_ROOT, ROOT_DISALLOWED, ROOT_DISCOURAGED, DaemonContext, LockFailed, Pidfile, _setgroups_hack, check_privileges, close_open_fds, create_pidlock, detached, - fd_by_path, get_fdmax, ignore_errno, initgroups, isatty, maybe_drop_privileges, - parse_gid, parse_uid, set_mp_process_title, set_pdeathsig, set_process_title, setgid, - setgroups, setuid, signals) + fd_by_path, get_fdmax, ignore_errno, initgroups, maybe_drop_privileges, parse_gid, + parse_uid, set_mp_process_title, set_pdeathsig, set_process_title, setgid, setgroups, + setuid, signals) from celery.utils.text import WhateverIO from t.unit import conftest @@ -25,13 +25,6 @@ resource = None -def test_isatty(): - fh = Mock(name='fh') - assert isatty(fh) is fh.isatty() - fh.isatty.side_effect = AttributeError() - assert not isatty(fh) - - class test_find_option_with_arg: def 
test_long_opt(self): From f8aebff8b068fd383ce5c54311f049a06ccad563 Mon Sep 17 00:00:00 2001 From: Marcelo Trylesinski Date: Sun, 16 Oct 2022 14:50:54 +0200 Subject: [PATCH 0208/1051] Remove unused variable `_range` --- celery/platforms.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/celery/platforms.py b/celery/platforms.py index 5690c72caa9..f424ac37ab4 100644 --- a/celery/platforms.py +++ b/celery/platforms.py @@ -13,7 +13,6 @@ import signal as _signal import sys import warnings -from collections import namedtuple from contextlib import contextmanager from billiard.compat import close_open_fds, get_fdmax @@ -65,8 +64,6 @@ PIDLOCKED = """ERROR: Pidfile ({0}) already exists. Seems we're already running? (pid: {1})""" -_range = namedtuple('_range', ('start', 'stop')) - ROOT_DISALLOWED = """\ Running a worker with superuser privileges when the worker accepts messages serialized with pickle is a very bad idea! From 6723791cc834d559caefc29d5700b87a10cfeccf Mon Sep 17 00:00:00 2001 From: Marcelo Trylesinski Date: Mon, 17 Oct 2022 10:52:13 +0200 Subject: [PATCH 0209/1051] Add type annotation on `concurrency/threads.py` (#7808) * Add type annotation on `concurrency/threads.py` * Update celery/concurrency/thread.py --- celery/concurrency/thread.py | 42 +++++++++++++++++++++++++++--------- celery/worker/request.py | 3 ++- pyproject.toml | 1 + 3 files changed, 35 insertions(+), 11 deletions(-) diff --git a/celery/concurrency/thread.py b/celery/concurrency/thread.py index ffd2e507f11..120374bcf9b 100644 --- a/celery/concurrency/thread.py +++ b/celery/concurrency/thread.py @@ -1,45 +1,67 @@ """Thread execution pool.""" +from __future__ import annotations -from concurrent.futures import ThreadPoolExecutor, wait +from concurrent.futures import Future, ThreadPoolExecutor, wait +from typing import TYPE_CHECKING, Any, Callable from .base import BasePool, apply_target __all__ = ('TaskPool',) +if TYPE_CHECKING: + import sys + + if sys.version_info >= (3, 8): + from typing import TypedDict + else: + from typing_extensions import TypedDict + + PoolInfo = TypedDict('PoolInfo', {'max-concurrency': int, 'threads': int}) + + # `TargetFunction` should be a Protocol that represents fast_trace_task and + # trace_task_ret. + TargetFunction = Callable[..., Any] + class ApplyResult: - def __init__(self, future): + def __init__(self, future: Future) -> None: self.f = future self.get = self.f.result - def wait(self, timeout=None): + def wait(self, timeout: float | None = None) -> None: wait([self.f], timeout) class TaskPool(BasePool): """Thread Task Pool.""" + limit: int body_can_be_buffer = True signal_safe = False - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) self.executor = ThreadPoolExecutor(max_workers=self.limit) - def on_stop(self): + def on_stop(self) -> None: self.executor.shutdown() super().on_stop() - def on_apply(self, target, args=None, kwargs=None, callback=None, - accept_callback=None, **_): + def on_apply( + self, + target: TargetFunction, + args: tuple[Any, ...] 
| None = None, + kwargs: dict[str, Any] | None = None, + callback: Callable[..., Any] | None = None, + accept_callback: Callable[..., Any] | None = None, + **_: Any + ) -> ApplyResult: f = self.executor.submit(apply_target, target, args, kwargs, callback, accept_callback) return ApplyResult(f) - def _get_info(self): + def _get_info(self) -> PoolInfo: return { 'max-concurrency': self.limit, 'threads': len(self.executor._threads) - # TODO use a public api to retrieve the current number of threads - # in the executor when available. (Currently not available). } diff --git a/celery/worker/request.py b/celery/worker/request.py index d0004a19ccc..2bffea47e9b 100644 --- a/celery/worker/request.py +++ b/celery/worker/request.py @@ -17,6 +17,7 @@ from celery import current_app, signals from celery.app.task import Context from celery.app.trace import fast_trace_task, trace_task, trace_task_ret +from celery.concurrency.base import BasePool from celery.exceptions import (Ignore, InvalidTaskError, Reject, Retry, TaskRevokedError, Terminated, TimeLimitExceeded, WorkerLostError) from celery.platforms import signals as _signals @@ -332,7 +333,7 @@ def correlation_id(self): # used similarly to reply_to return self._request_dict['correlation_id'] - def execute_using_pool(self, pool, **kwargs): + def execute_using_pool(self, pool: BasePool, **kwargs): """Used by the worker to send this task to the pool. Arguments: diff --git a/pyproject.toml b/pyproject.toml index cc090fe3b63..fd055e56cbe 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -17,6 +17,7 @@ files = [ "celery/states.py", "celery/signals.py", "celery/fixups", + "celery/concurrency/thread.py" ] [tool.coverage.report] From bc060a538f966f3fe2361c72c228f58ff3776469 Mon Sep 17 00:00:00 2001 From: Marcelo Trylesinski Date: Mon, 17 Oct 2022 08:45:47 +0200 Subject: [PATCH 0210/1051] Fix linter pipeline --- .github/workflows/lint_python.yml | 30 ------------------------------ .github/workflows/linter.yml | 14 ++++++++++++++ .pre-commit-config.yaml | 6 ++++++ 3 files changed, 20 insertions(+), 30 deletions(-) delete mode 100644 .github/workflows/lint_python.yml create mode 100644 .github/workflows/linter.yml diff --git a/.github/workflows/lint_python.yml b/.github/workflows/lint_python.yml deleted file mode 100644 index e434e9596e2..00000000000 --- a/.github/workflows/lint_python.yml +++ /dev/null @@ -1,30 +0,0 @@ -name: lint Python -on: [pull_request] -jobs: - lint_python: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - uses: actions/setup-python@v3 - - uses: pre-commit/action@v3.0.0 - - run: pip install --upgrade pip wheel - - run: pip install -U bandit codespell flake8 isort pytest pyupgrade tox - - - name: bandit - run: bandit -r . || true - - - name: Run CodeSpell - run: codespell --ignore-words-list="brane,gool,ist,sherif,wil" --quiet-level=2 --skip="*.key" || true - - run: pip install -r requirements.txt || true - - - name: Run tox - run: tox || true - - - name: Run pytest - run: pytest . || true - - - name: Test pytest with doctest - run: pytest --doctest-modules . 
|| true - - - name: MyPy - run: tox -e mypy diff --git a/.github/workflows/linter.yml b/.github/workflows/linter.yml new file mode 100644 index 00000000000..ac393f42798 --- /dev/null +++ b/.github/workflows/linter.yml @@ -0,0 +1,14 @@ +name: Linter + +on: [pull_request] + +jobs: + linter: + runs-on: ubuntu-latest + steps: + + - name: Checkout branch + uses: actions/checkout@v3 + + - name: Run pre-commit + uses: pre-commit/action@v3.0.0 diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 7a5fe2c0c88..e787f01d423 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -27,3 +27,9 @@ repos: rev: 5.10.1 hooks: - id: isort + + - repo: https://github.com/pre-commit/mirrors-mypy + rev: v0.982 + hooks: + - id: mypy + pass_filenames: false From 49b807398bf1f6f7ce9735afcd0af6c0f159b67a Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 17 Oct 2022 17:00:48 +0000 Subject: [PATCH 0211/1051] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index e787f01d423..b127d2a3097 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/asottile/pyupgrade - rev: v3.0.0 + rev: v3.1.0 hooks: - id: pyupgrade args: ["--py37-plus"] From 43924e3b0e29450e797a0101e60a43fc22928e6b Mon Sep 17 00:00:00 2001 From: "pyup.io bot" Date: Mon, 17 Oct 2022 12:05:37 -0500 Subject: [PATCH 0212/1051] Scheduled weekly dependency update for week 42 (#7821) * Update cryptography from 38.0.1 to 38.0.2 * Pin elasticsearch to latest version 8.4.3 * Update pycurl from 7.43.0.5 to 7.45.1 * Update requirements/extras/elasticsearch.txt * Update requirements/test-ci-default.txt Co-authored-by: Asif Saif Uddin --- requirements/extras/auth.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/auth.txt b/requirements/extras/auth.txt index 859fab375df..bd312a3a72c 100644 --- a/requirements/extras/auth.txt +++ b/requirements/extras/auth.txt @@ -1 +1 @@ -cryptography==38.0.1 +cryptography==38.0.2 From 3f4f2d87210f306696d48bbcca9831d5a722cb86 Mon Sep 17 00:00:00 2001 From: Marcelo Trylesinski Date: Tue, 18 Oct 2022 21:01:17 +0200 Subject: [PATCH 0213/1051] Remove `.cookiecutterrc` --- .cookiecutterrc | 10 ---------- 1 file changed, 10 deletions(-) delete mode 100644 .cookiecutterrc diff --git a/.cookiecutterrc b/.cookiecutterrc deleted file mode 100644 index fba1e8a6fd4..00000000000 --- a/.cookiecutterrc +++ /dev/null @@ -1,10 +0,0 @@ -default_context: - - email: 'ask@celeryproject.org' - full_name: 'Ask Solem' - github_username: 'celery' - project_name: 'Celery' - project_short_description: 'Distributed task queue', - project_slug: 'celery' - version: '1.0.0' - year: '2009-2016' From 1da6c281a67e4978e1d7bd8422b86f9aa105d854 Mon Sep 17 00:00:00 2001 From: Marcelo Trylesinski Date: Wed, 19 Oct 2022 08:34:21 +0200 Subject: [PATCH 0214/1051] Remove `.coveragerc` file (#7826) * Remove `.coveragerc` file * Use booleans in `branch` and `cover_pylib` settings --- .coveragerc | 19 ------------------- pyproject.toml | 19 +++++++++++++++++++ 2 files changed, 19 insertions(+), 19 deletions(-) delete mode 100644 .coveragerc diff 
--git a/.coveragerc b/.coveragerc
deleted file mode 100644
index 4d3146384b7..00000000000
--- a/.coveragerc
+++ /dev/null
@@ -1,19 +0,0 @@
-[run]
-branch = 1
-cover_pylib = 0
-include=*celery/*
-omit = celery.tests.*
-
-[report]
-omit =
-    */python?.?/*
-    */site-packages/*
-    */pypy/*
-    */celery/bin/graph.py
-    *celery/bin/logtool.py
-    *celery/task/base.py
-    *celery/contrib/sphinx.py
-    *celery/concurrency/asynpool.py
-    *celery/utils/debug.py
-    *celery/contrib/testing/*
-    *celery/contrib/pytest.py
diff --git a/pyproject.toml b/pyproject.toml
index fd055e56cbe..393f1d49656 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -20,9 +20,28 @@ files = [
     "celery/concurrency/thread.py"
 ]
 
+[tool.coverage.run]
+branch = true
+cover_pylib = false
+include = ["*celery/*"]
+omit = ["celery.tests.*"]
+
 [tool.coverage.report]
 exclude_lines = [
     "pragma: no cover",
     "if TYPE_CHECKING:",
     "except ImportError:"
 ]
+omit = [
+    "*/python?.?/*",
+    "*/site-packages/*",
+    "*/pypy/*",
+    "*/celery/bin/graph.py",
+    "*celery/bin/logtool.py",
+    "*celery/task/base.py",
+    "*celery/contrib/sphinx.py",
+    "*celery/concurrency/asynpool.py",
+    "*celery/utils/debug.py",
+    "*celery/contrib/testing/*",
+    "*celery/contrib/pytest.py"
+]

From 7e8c4a25aeac904cd0d371749d607a4abe10aad7 Mon Sep 17 00:00:00 2001
From: Omer Katz
Date: Wed, 19 Oct 2022 16:27:17 +0300
Subject: [PATCH 0215/1051] Update documentation regarding TLS default value
 changes in py-amqp

---
 docs/history/whatsnew-5.1.rst | 20 ++++++++++++++++++++
 docs/userguide/configuration.rst | 12 +++++++-----
 2 files changed, 27 insertions(+), 5 deletions(-)

diff --git a/docs/history/whatsnew-5.1.rst b/docs/history/whatsnew-5.1.rst
index 237b9722ba6..f35656d6ed3 100644
--- a/docs/history/whatsnew-5.1.rst
+++ b/docs/history/whatsnew-5.1.rst
@@ -208,6 +208,26 @@ Kombu
 
 Starting from v5.1, the minimum required version is Kombu 5.1.0.
 
+Py-AMQP
+~~~~~~~
+
+Starting from Celery 5.1, py-amqp will always validate certificates received from the server
+and it is no longer required to manually set ``cert_reqs`` to ``ssl.CERT_REQUIRED``.
+
+The previous default, ``ssl.CERT_NONE``, is insecure and its usage should be discouraged.
+If you'd like to revert to the previous insecure default, set ``cert_reqs`` to ``ssl.CERT_NONE``:
+
+.. code-block:: python
+
+    import ssl
+
+    broker_use_ssl = {
+        'keyfile': '/var/ssl/private/worker-key.pem',
+        'certfile': '/var/ssl/amqp-server-cert.pem',
+        'ca_certs': '/var/ssl/myca.pem',
+        'cert_reqs': ssl.CERT_NONE
+    }
+
 Billiard
 ~~~~~~~~
 
diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst
index 3fa48f70233..5350d9fa2af 100644
--- a/docs/userguide/configuration.rst
+++ b/docs/userguide/configuration.rst
@@ -2698,12 +2698,14 @@ certificate authority:
         'cert_reqs': ssl.CERT_REQUIRED
     }
 
-.. warning::
+.. versionadded:: 5.1
+
+    Starting from Celery 5.1, py-amqp will always validate certificates received from the server
+    and it is no longer required to manually set ``cert_reqs`` to ``ssl.CERT_REQUIRED``.
+
+    The previous default, ``ssl.CERT_NONE``, is insecure and its usage should be discouraged.
+    If you'd like to revert to the previous insecure default, set ``cert_reqs`` to ``ssl.CERT_NONE``.
 
-   Be careful using ``broker_use_ssl=True``. It's possible that your default
-   configuration won't validate the server cert at all. Please read Python
-   `ssl module security
-   considerations `_.
 ``redis``
 _________

From 1fca4377c6ed7e322e6c52d255365b7dadc509da Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Thu, 20 Oct 2022 11:25:18 +0600
Subject: [PATCH 0216/1051] kombu>=5.3.0b2 (#7834)

---
 requirements/default.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements/default.txt b/requirements/default.txt
index ba30d7d31e8..34f4c77b685 100644
--- a/requirements/default.txt
+++ b/requirements/default.txt
@@ -1,6 +1,6 @@
 pytz>=2021.3
 billiard>=4.0.2,<5.0
-kombu>=5.3.0b1,<6.0
+kombu>=5.3.0b2,<6.0
 vine>=5.0.0,<6.0
 click>=8.1.2,<9.0
 click-didyoumean>=0.3.0

From c9b593fcd6350193f3bbbb2fac3b9085c3557bad Mon Sep 17 00:00:00 2001
From: woutdenolf
Date: Thu, 20 Oct 2022 07:29:18 +0200
Subject: [PATCH 0217/1051] Fix readthedocs build failure (#7835)

* kombu>=5.3.0b2

* configure readthedocs to use requirements/docs.txt

---
 .readthedocs.yaml | 26 ++++++++++++++++++++++++++
 1 file changed, 26 insertions(+)
 create mode 100644 .readthedocs.yaml

diff --git a/.readthedocs.yaml b/.readthedocs.yaml
new file mode 100644
index 00000000000..b296878a8d8
--- /dev/null
+++ b/.readthedocs.yaml
@@ -0,0 +1,26 @@
+# Read the Docs configuration file
+# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
+
+# Required
+version: 2
+
+# Set the version of Python and other tools you might need
+build:
+  os: ubuntu-20.04
+  tools:
+    python: "3.9"
+
+# Build documentation in the docs/ directory with Sphinx
+sphinx:
+  configuration: docs/conf.py
+
+# If using Sphinx, optionally build your docs in additional formats such as PDF
+# formats:
+#    - pdf
+
+# Optionally declare the Python requirements required to build your docs
+python:
+  install:
+    - method: pip
+      path: .
+    - requirements: requirements/docs.txt

From 7d4fe22d03dabe1de2cf5009cc6ea1064b46edcb Mon Sep 17 00:00:00 2001
From: Tomer Nosrati
Date: Wed, 19 Oct 2022 23:13:14 +0300
Subject: [PATCH 0218/1051] Fixed bug in group, chord, chain stamp() method,
 where the visitor overrides the previous stamps in tasks of these objects
 (e.g.
The tasks of the group had their previous stamps overridden partially) --- celery/canvas.py | 7 ++++ t/unit/tasks/test_canvas.py | 64 ++++++++++++++++++------------------- 2 files changed, 39 insertions(+), 32 deletions(-) diff --git a/celery/canvas.py b/celery/canvas.py index a2aedd6334c..add9482b0fb 100644 --- a/celery/canvas.py +++ b/celery/canvas.py @@ -505,6 +505,10 @@ def stamp(self, visitor=None, **headers): else: headers["stamped_headers"] = [header for header in headers.keys() if header not in self.options] _merge_dictionaries(headers, self.options) + + stamped_headers = set(self.options.get("stamped_headers", [])) + stamped_headers.update(headers["stamped_headers"]) + headers["stamped_headers"] = list(stamped_headers) return self.set(**headers) def _with_list_option(self, key): @@ -1761,6 +1765,9 @@ def run(self, header, body, partial_args, app=None, interval=None, options = dict(self.options, **options) if options else self.options if options: options.pop('task_id', None) + stamped_headers = set(body.options.get("stamped_headers", [])) + stamped_headers.update(options["stamped_headers"]) + options["stamped_headers"] = list(stamped_headers) body.options.update(options) bodyres = body.freeze(task_id, root_id=root_id) diff --git a/t/unit/tasks/test_canvas.py b/t/unit/tasks/test_canvas.py index 33626f097c3..f4428a6c424 100644 --- a/t/unit/tasks/test_canvas.py +++ b/t/unit/tasks/test_canvas.py @@ -151,7 +151,7 @@ def test_double_stamping(self, subtests): assert sig_1_res._get_task_meta()["stamp2"] == ["stamp2"] with subtests.test("sig_1_res is stamped twice", stamped_headers=["stamp2", "stamp1"]): - assert sig_1_res._get_task_meta()["stamped_headers"] == ["stamp2", "stamp1", "groups"] + assert sorted(sig_1_res._get_task_meta()["stamped_headers"]) == sorted(["stamp2", "stamp1", "groups"]) def test_twice_stamping(self, subtests): """ @@ -168,10 +168,10 @@ def test_twice_stamping(self, subtests): sig_1.apply() with subtests.test("sig_1_res is stamped twice", stamps=["stamp2", "stamp1"]): - assert sig_1_res._get_task_meta()["stamp"] == ["stamp2", "stamp1"] + assert sorted(sig_1_res._get_task_meta()["stamp"]) == sorted(["stamp2", "stamp1"]) with subtests.test("sig_1_res is stamped twice", stamped_headers=["stamp2", "stamp1"]): - assert sig_1_res._get_task_meta()["stamped_headers"] == ["stamp", "groups"] + assert sorted(sig_1_res._get_task_meta()["stamped_headers"]) == sorted(["stamp", "groups"]) @pytest.mark.usefixtures('depends_on_current_app') def test_manual_stamping(self): @@ -188,7 +188,7 @@ def test_manual_stamping(self): sig_1.stamp(visitor=None, groups=stamps[0]) sig_1_res = sig_1.freeze() sig_1.apply() - assert sig_1_res._get_task_meta()['groups'] == stamps + assert sorted(sig_1_res._get_task_meta()['groups']) == sorted(stamps) def test_getitem_property_class(self): assert Signature.task @@ -804,10 +804,10 @@ def test_group_stamping_one_level(self, subtests): assert sig_2_res._get_task_meta()['stamp'] == ["stamp"] with subtests.test("sig_1_res has stamped_headers", stamped_headers=["stamp", 'groups']): - assert sig_1_res._get_task_meta()['stamped_headers'] == ['stamp', 'groups'] + assert sorted(sig_1_res._get_task_meta()['stamped_headers']) == sorted(['stamp', 'groups']) with subtests.test("sig_2_res has stamped_headers", stamped_headers=["stamp"]): - assert sig_2_res._get_task_meta()['stamped_headers'] == ['stamp', 'groups'] + assert sorted(sig_2_res._get_task_meta()['stamped_headers']) == sorted(['stamp', 'groups']) def test_group_stamping_two_levels(self, subtests): """ @@ 
-854,11 +854,11 @@ def test_group_stamping_two_levels(self, subtests): with subtests.test("sig_2_res is stamped", groups=[g1_res.id]): assert sig_2_res._get_task_meta()['groups'] == [g1_res.id] with subtests.test("first_nested_sig_res is stamped", groups=[g1_res.id, g2_res.id]): - assert first_nested_sig_res._get_task_meta()['groups'] == \ - [g1_res.id, g2_res.id] + assert sorted(first_nested_sig_res._get_task_meta()['groups']) == \ + sorted([g1_res.id, g2_res.id]) with subtests.test("second_nested_sig_res is stamped", groups=[g1_res.id, g2_res.id]): - assert second_nested_sig_res._get_task_meta()['groups'] == \ - [g1_res.id, g2_res.id] + assert sorted(second_nested_sig_res._get_task_meta()['groups']) == \ + sorted([g1_res.id, g2_res.id]) def test_group_stamping_with_replace(self, subtests): """ @@ -988,17 +988,17 @@ def test_group_stamping_three_levels(self, subtests): with subtests.test("sig_in_g1_2_res is stamped", groups=[g1_res.id]): assert sig_in_g1_2_res._get_task_meta()['groups'] == [g1_res.id] with subtests.test("sig_in_g2_res is stamped", groups=[g1_res.id, g2_res.id]): - assert sig_in_g2_res._get_task_meta()['groups'] == \ - [g1_res.id, g2_res.id] + assert sorted(sig_in_g2_res._get_task_meta()['groups']) == \ + sorted([g1_res.id, g2_res.id]) with subtests.test("sig_in_g2_chain_res is stamped", groups=[g1_res.id, g2_res.id]): - assert sig_in_g2_chain_res._get_task_meta()['groups'] == \ - [g1_res.id, g2_res.id] + assert sorted(sig_in_g2_chain_res._get_task_meta()['groups']) == \ + sorted([g1_res.id, g2_res.id]) with subtests.test("sig_in_g3_1_res is stamped", groups=[g1_res.id, g2_res.id, g3_res.id]): - assert sig_in_g3_1_res._get_task_meta()['groups'] == \ - [g1_res.id, g2_res.id, g3_res.id] + assert sorted(sig_in_g3_1_res._get_task_meta()['groups']) == \ + sorted([g1_res.id, g2_res.id, g3_res.id]) with subtests.test("sig_in_g3_2_res is stamped", groups=[g1_res.id, g2_res.id, g3_res.id]): - assert sig_in_g3_2_res._get_task_meta()['groups'] == \ - [g1_res.id, g2_res.id, g3_res.id] + assert sorted(sig_in_g3_2_res._get_task_meta()['groups']) == \ + sorted([g1_res.id, g2_res.id, g3_res.id]) def test_group_stamping_parallel_groups(self, subtests): """ @@ -1067,14 +1067,14 @@ def test_group_stamping_parallel_groups(self, subtests): with subtests.test("sig_in_g2_1 is stamped", groups=[g1_res.id, g2_res.id]): assert sig_in_g2_1_res.id == 'sig_in_g2_1' - assert sig_in_g2_1_res._get_task_meta()['groups'] == \ - [g1_res.id, g2_res.id] + assert sorted(sig_in_g2_1_res._get_task_meta()['groups']) == \ + sorted([g1_res.id, g2_res.id]) with subtests.test("sig_in_g2_2 is stamped", groups=[g1_res.id, g2_res.id]): assert sig_in_g2_2_res.id == 'sig_in_g2_2' - assert sig_in_g2_2_res._get_task_meta()['groups'] == \ - [g1_res.id, g2_res.id] + assert sorted(sig_in_g2_2_res._get_task_meta()['groups']) == \ + sorted([g1_res.id, g2_res.id]) with subtests.test("sig_in_g3_chain is stamped", groups=[g1_res.id]): @@ -1085,13 +1085,13 @@ def test_group_stamping_parallel_groups(self, subtests): with subtests.test("sig_in_g3_1 is stamped", groups=[g1_res.id, g3_res.id]): assert sig_in_g3_1_res.id == 'sig_in_g3_1' - assert sig_in_g3_1_res._get_task_meta()['groups'] == \ - [g1_res.id, g3_res.id] + assert sorted(sig_in_g3_1_res._get_task_meta()['groups']) == \ + sorted([g1_res.id, g3_res.id]) with subtests.test("sig_in_g3_2 is stamped", groups=[g1_res.id, g3_res.id]): - assert sig_in_g3_2_res._get_task_meta()['groups'] == \ - [g1_res.id, g3_res.id] + assert sorted(sig_in_g3_2_res._get_task_meta()['groups']) == \ + 
sorted([g1_res.id, g3_res.id]) def test_repr(self): x = group([self.add.s(2, 2), self.add.s(4, 4)]) @@ -1520,10 +1520,10 @@ def test_chord_stamping_one_level(self, subtests): assert sig_2_res._get_task_meta()['stamp'] == ["stamp"] with subtests.test("sig_1_res has stamped_headers", stamped_headers=["stamp", 'groups']): - assert sig_1_res._get_task_meta()['stamped_headers'] == ['stamp', 'groups'] + assert sorted(sig_1_res._get_task_meta()['stamped_headers']) == sorted(['stamp', 'groups']) with subtests.test("sig_2_res has stamped_headers", stamped_headers=["stamp", 'groups']): - assert sig_2_res._get_task_meta()['stamped_headers'] == ['stamp', 'groups'] + assert sorted(sig_2_res._get_task_meta()['stamped_headers']) == sorted(['stamp', 'groups']) def test_chord_stamping_two_levels(self, subtests): """ @@ -1565,11 +1565,11 @@ def test_chord_stamping_two_levels(self, subtests): with subtests.test("sig_2_res body is stamped", groups=[g1.id]): assert sig_2_res._get_task_meta()['groups'] == [g1.id] with subtests.test("first_nested_sig_res body is stamped", groups=[g1.id, g2_res.id]): - assert first_nested_sig_res._get_task_meta()['groups'] == \ - [g1.id, g2_res.id] + assert sorted(first_nested_sig_res._get_task_meta()['groups']) == \ + sorted([g1.id, g2_res.id]) with subtests.test("second_nested_sig_res body is stamped", groups=[g1.id, g2_res.id]): - assert second_nested_sig_res._get_task_meta()['groups'] == \ - [g1.id, g2_res.id] + assert sorted(second_nested_sig_res._get_task_meta()['groups']) == \ + sorted([g1.id, g2_res.id]) def test_chord_stamping_body_group(self, subtests): """ From 3a5a5c2b7c4301d55951917c4d3a54a7f98486c5 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Fri, 21 Oct 2022 01:58:52 +0300 Subject: [PATCH 0219/1051] Stabilized test_mutable_errback_called_by_chord_from_group_fail_multiple --- t/integration/test_canvas.py | 31 +++++++++++++++++++++++++------ 1 file changed, 25 insertions(+), 6 deletions(-) diff --git a/t/integration/test_canvas.py b/t/integration/test_canvas.py index 33ed392944b..8e805db49b7 100644 --- a/t/integration/test_canvas.py +++ b/t/integration/test_canvas.py @@ -2472,10 +2472,8 @@ def test_immutable_errback_called_by_chord_from_group_fail_multiple( await_redis_count(fail_task_count, redis_key=redis_key) redis_connection.delete(redis_key) - @pytest.mark.parametrize( - "errback_task", [errback_old_style, errback_new_style, ], - ) - def test_mutable_errback_called_by_chord_from_group_fail_multiple( + @pytest.mark.parametrize("errback_task", [errback_old_style, errback_new_style]) + def test_mutable_errback_called_by_chord_from_group_fail_multiple_on_header_failure( self, errback_task, manager, subtests ): if not manager.app.conf.result_backend.startswith("redis"): @@ -2488,11 +2486,10 @@ def test_mutable_errback_called_by_chord_from_group_fail_multiple( fail_sigs = tuple( fail.s() for _ in range(fail_task_count) ) - fail_sig_ids = tuple(s.freeze().id for s in fail_sigs) errback = errback_task.s() # Include a mix of passing and failing tasks child_sig = group( - *(identity.si(42) for _ in range(24)), # arbitrary task count + *(identity.si(42) for _ in range(8)), # arbitrary task count *fail_sigs, ) @@ -2510,6 +2507,28 @@ def test_mutable_errback_called_by_chord_from_group_fail_multiple( # is attached to the chord body which is a single task! 
await_redis_count(1, redis_key=expected_redis_key) + @pytest.mark.parametrize("errback_task", [errback_old_style, errback_new_style]) + def test_mutable_errback_called_by_chord_from_group_fail_multiple_on_body_failure( + self, errback_task, manager, subtests + ): + if not manager.app.conf.result_backend.startswith("redis"): + raise pytest.skip("Requires redis result backend.") + redis_connection = get_redis_connection() + + fail_task_count = 42 + # We have to use failing task signatures with unique task IDs to ensure + # the chord can complete when they are used as part of its header! + fail_sigs = tuple( + fail.s() for _ in range(fail_task_count) + ) + fail_sig_ids = tuple(s.freeze().id for s in fail_sigs) + errback = errback_task.s() + # Include a mix of passing and failing tasks + child_sig = group( + *(identity.si(42) for _ in range(8)), # arbitrary task count + *fail_sigs, + ) + chord_sig = chord((identity.si(42),), child_sig) chord_sig.link_error(errback) for fail_sig_id in fail_sig_ids: From e0b0af6c7af9f7a127ae0321dc4e798433c89592 Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Fri, 21 Oct 2022 18:40:54 +0200 Subject: [PATCH 0220/1051] Use SPDX license expression in project metadata --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index b1876c5f501..8000d5b3c42 100755 --- a/setup.py +++ b/setup.py @@ -160,7 +160,7 @@ def run_tests(self): author=meta['author'], author_email=meta['contact'], url=meta['homepage'], - license='BSD', + license='BSD-3-Clause', platforms=['any'], install_requires=install_requires(), python_requires=">=3.7", From 5092598fb88c1f18e3fe709861cdb31df90a7264 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 27 Oct 2022 01:29:20 +0300 Subject: [PATCH 0221/1051] New control command `revoke_by_stamped_headers` (#7838) * Added pytest-order==1.0.1 * Added a new control command `revoke_by_stamped_headers` to revoke tasks by their stamped header instead of task id (terminate only works on running tasks in memory) --- celery/app/control.py | 35 ++++++++++++++ celery/result.py | 24 ++++++++++ celery/worker/control.py | 59 +++++++++++++++++++++++- celery/worker/request.py | 40 ++++++++++++---- celery/worker/state.py | 4 ++ docs/userguide/workers.rst | 65 ++++++++++++++++++++++++++ requirements/test.txt | 1 + t/integration/test_tasks.py | 86 ++++++++++++++++++++++++++++++++++- t/unit/app/test_control.py | 35 ++++++++++++++ t/unit/worker/test_control.py | 20 +++++++- t/unit/worker/test_state.py | 1 + 11 files changed, 358 insertions(+), 12 deletions(-) diff --git a/celery/app/control.py b/celery/app/control.py index 551ae68bf8b..52763e8a5f5 100644 --- a/celery/app/control.py +++ b/celery/app/control.py @@ -499,6 +499,41 @@ def revoke(self, task_id, destination=None, terminate=False, 'signal': signal, }, **kwargs) + def revoke_by_stamped_headers(self, headers, destination=None, terminate=False, + signal=TERM_SIGNAME, **kwargs): + """ + Tell all (or specific) workers to revoke a task by headers. + + If a task is revoked, the workers will ignore the task and + not execute it after all. + + Arguments: + headers (dict[str, Union(str, list)]): Headers to match when revoking tasks. + terminate (bool): Also terminate the process currently working + on the task (if any). + signal (str): Name of signal to send to process if terminate. + Default is TERM. + + See Also: + :meth:`broadcast` for supported keyword arguments. 
+ """ + result = self.broadcast('revoke_by_stamped_headers', destination=destination, arguments={ + 'headers': headers, + 'terminate': terminate, + 'signal': signal, + }, **kwargs) + + task_ids = set() + if result: + for host in result: + for response in host.values(): + task_ids.update(response['ok']) + + if task_ids: + return self.revoke(list(task_ids), destination=destination, terminate=terminate, signal=signal, **kwargs) + else: + return result + def terminate(self, task_id, destination=None, signal=TERM_SIGNAME, **kwargs): """Tell all (or specific) workers to terminate a task by id (or list of ids). diff --git a/celery/result.py b/celery/result.py index ecbe17cb569..3dcd02523ee 100644 --- a/celery/result.py +++ b/celery/result.py @@ -161,6 +161,30 @@ def revoke(self, connection=None, terminate=False, signal=None, terminate=terminate, signal=signal, reply=wait, timeout=timeout) + def revoke_by_stamped_headers(self, headers, connection=None, terminate=False, signal=None, + wait=False, timeout=None): + """Send revoke signal to all workers only for tasks with matching headers values. + + Any worker receiving the task, or having reserved the + task, *must* ignore it. + All header fields *must* match. + + Arguments: + headers (dict[str, Union(str, list)]): Headers to match when revoking tasks. + terminate (bool): Also terminate the process currently working + on the task (if any). + signal (str): Name of signal to send to process if terminate. + Default is TERM. + wait (bool): Wait for replies from workers. + The ``timeout`` argument specifies the seconds to wait. + Disabled by default. + timeout (float): Time in seconds to wait for replies when + ``wait`` is enabled. + """ + self.app.control.revoke_by_stamped_headers(headers, connection=connection, + terminate=terminate, signal=signal, + reply=wait, timeout=timeout) + def get(self, timeout=None, propagate=True, interval=0.5, no_ack=True, follow_parents=True, callback=None, on_message=None, on_interval=None, disable_sync_subtasks=True, diff --git a/celery/worker/control.py b/celery/worker/control.py index 197d0c4d617..89a4feb2c63 100644 --- a/celery/worker/control.py +++ b/celery/worker/control.py @@ -1,12 +1,13 @@ """Worker remote control command implementations.""" import io import tempfile +import warnings from collections import UserDict, namedtuple from billiard.common import TERM_SIGNAME from kombu.utils.encoding import safe_repr -from celery.exceptions import WorkerShutdown +from celery.exceptions import CeleryWarning, WorkerShutdown from celery.platforms import signals as _signals from celery.utils.functional import maybe_list from celery.utils.log import get_logger @@ -146,6 +147,60 @@ def revoke(state, task_id, terminate=False, signal=None, **kwargs): # Outside of this scope that is a function. # supports list argument since 3.1 task_ids, task_id = set(maybe_list(task_id) or []), None + task_ids = _revoke(state, task_ids, terminate, signal, **kwargs) + return ok(f'tasks {task_ids} flagged as revoked') + + +@control_command( + variadic='headers', + signature='[key1=value1 [key2=value2 [... [keyN=valueN]]]]', +) +def revoke_by_stamped_headers(state, headers, terminate=False, signal=None, **kwargs): + """Revoke task by header (or list of headers). + + Keyword Arguments: + terminate (bool): Also terminate the process if the task is active. + signal (str): Name of signal to use for terminate (e.g., ``KILL``). 
+ """ + # pylint: disable=redefined-outer-name + # XXX Note that this redefines `terminate`: + # Outside of this scope that is a function. + # supports list argument since 3.1 + if isinstance(headers, list): + headers = {h.split('=')[0]: h.split('=')[1] for h in headers}, None + + worker_state.revoked_headers.update(headers) + + if not terminate: + return ok(f'headers {headers} flagged as revoked') + + task_ids = set() + requests = list(worker_state.active_requests) + + # Terminate all running tasks of matching headers + if requests: + warnings.warn( + "Terminating tasks by headers does not scale well when worker concurrency is high", + CeleryWarning + ) + + for req in requests: + if req.stamped_headers: + for stamped_header_key, expected_header_value in headers.items(): + if stamped_header_key in req.stamped_headers and \ + stamped_header_key in req._message.headers['stamps']: + actual_header = req._message.headers['stamps'][stamped_header_key] + if expected_header_value in actual_header: + task_ids.add(req.task_id) + continue + + task_ids = _revoke(state, task_ids, terminate, signal, **kwargs) + if isinstance(task_ids, dict): + return task_ids + return ok(list(task_ids)) + + +def _revoke(state, task_ids, terminate=False, signal=None, **kwargs): size = len(task_ids) terminated = set() @@ -166,7 +221,7 @@ def revoke(state, task_id, terminate=False, signal=None, **kwargs): idstr = ', '.join(task_ids) logger.info('Tasks flagged as revoked: %s', idstr) - return ok(f'tasks {idstr} flagged as revoked') + return task_ids @control_command( diff --git a/celery/worker/request.py b/celery/worker/request.py index 2bffea47e9b..b409bdc60da 100644 --- a/celery/worker/request.py +++ b/celery/worker/request.py @@ -21,7 +21,7 @@ from celery.exceptions import (Ignore, InvalidTaskError, Reject, Retry, TaskRevokedError, Terminated, TimeLimitExceeded, WorkerLostError) from celery.platforms import signals as _signals -from celery.utils.functional import maybe, noop +from celery.utils.functional import maybe, maybe_list, noop from celery.utils.log import get_logger from celery.utils.nodenames import gethostname from celery.utils.serialization import get_pickled_exception @@ -61,6 +61,7 @@ def __optimize__(): task_accepted = state.task_accepted task_ready = state.task_ready revoked_tasks = state.revoked +revoked_headers = state.revoked_headers class Request: @@ -402,9 +403,9 @@ def execute(self, loglevel=None, logfile=None): def maybe_expire(self): """If expired, mark the task as revoked.""" - if self._expires: - now = datetime.now(self._expires.tzinfo) - if now > self._expires: + if self.expires: + now = datetime.now(self.expires.tzinfo) + if now > self.expires: revoked_tasks.add(self.id) return True @@ -462,10 +463,33 @@ def revoked(self): expired = False if self._already_revoked: return True - if self._expires: + if self.expires: expired = self.maybe_expire() - if self.id in revoked_tasks: - info('Discarding revoked task: %s[%s]', self.name, self.id) + revoked_by_id = self.id in revoked_tasks + revoked_by_header, revoking_header = False, None + + if not revoked_by_id and self.stamped_headers: + for header in self.stamped_headers: + if header in revoked_headers: + revoked_header = revoked_headers[header] + stamped_header = self._message.headers['stamps'][header] + + if isinstance(stamped_header, (list, tuple)): + for stamped_value in stamped_header: + if stamped_value in maybe_list(revoked_header): + revoked_by_header = True + revoking_header = {header: stamped_value} + break + else: + revoked_by_header = 
stamped_header in revoked_headers[header]
+                    revoking_header = {header: stamped_header}
+                break
+
+        if any((expired, revoked_by_id, revoked_by_header)):
+            log_msg = 'Discarding revoked task: %s[%s]'
+            if revoked_by_header:
+                log_msg += ' (revoked by header: %s)' % revoking_header
+            info(log_msg, self.name, self.id)
             self._announce_revoked(
                 'expired' if expired else 'revoked', False, None, expired,
             )
@@ -719,7 +743,7 @@ class Request(base):
 
         def execute_using_pool(self, pool, **kwargs):
             task_id = self.task_id
-            if (self.expires or task_id in revoked_tasks) and self.revoked():
+            if self.revoked():
                 raise TaskRevokedError(task_id)
 
             time_limit, soft_time_limit = self.time_limits
diff --git a/celery/worker/state.py b/celery/worker/state.py
index 97f49150286..74b28d4397e 100644
--- a/celery/worker/state.py
+++ b/celery/worker/state.py
@@ -67,6 +67,9 @@
 #: the list of currently revoked tasks.  Persistent if ``statedb`` set.
 revoked = LimitedSet(maxlen=REVOKES_MAX, expires=REVOKE_EXPIRES)
 
+#: Mapping of stamped headers flagged for revoking.
+revoked_headers = {}
+
 should_stop = None
 should_terminate = None
 
@@ -79,6 +82,7 @@ def reset_state():
     total_count.clear()
     all_total_count[:] = [0]
     revoked.clear()
+    revoked_headers.clear()
 
 
 def maybe_shutdown():
diff --git a/docs/userguide/workers.rst b/docs/userguide/workers.rst
index 03ac8a9aa5e..113afc78e07 100644
--- a/docs/userguide/workers.rst
+++ b/docs/userguide/workers.rst
@@ -468,6 +468,71 @@ Note that remote control commands must be working for revokes to work.
 Remote control commands are only supported by the RabbitMQ (amqp) and Redis
 at this point.
 
+.. control:: revoke_by_stamped_header
+
+``revoke_by_stamped_header``: Revoking tasks by their stamped headers
+---------------------------------------------------------------------
+:pool support: all, terminate only supported by prefork and eventlet
+:broker support: *amqp, redis*
+:command: :program:`celery -A proj control revoke_by_stamped_header `
+
+This command is similar to :meth:`~@control.revoke`, but instead of
+specifying the task id(s), you specify the stamped header(s) as key-value pair(s),
+and each task that has a stamped header matching the key-value pair(s) will be revoked.
+
+.. warning::
+
+    The revoked headers mapping is not persistent across restarts, so if you
+    restart the workers, the revoked headers will be lost and need to be
+    mapped again.
+
+.. warning::
+
+    This command may perform poorly if your worker pool concurrency is high
+    and terminate is enabled, since it will have to iterate over all the running
+    tasks to find the ones with the specified stamped header.
+
+**Example**
+
+.. code-block:: pycon
+
+    >>> app.control.revoke_by_stamped_header({'header': 'value'})
+
+    >>> app.control.revoke_by_stamped_header({'header': 'value'}, terminate=True)
+
+    >>> app.control.revoke_by_stamped_header({'header': 'value'}, terminate=True, signal='SIGKILL')
+
+
+Revoking multiple tasks by stamped headers
+------------------------------------------
+
+.. versionadded:: 5.3
+
+The ``revoke_by_stamped_header`` method also accepts a list argument, where it will revoke
+by several headers or several values.
+
+**Example**
+
+.. code-block:: pycon
+
+    >>> app.control.revoke_by_stamped_header({
+    ...     'header_A': 'value_1',
+    ...     'header_B': ['value_2', 'value_3'],
+    ... })
+
+This will revoke all of the tasks that have a stamped header ``header_A`` with value ``value_1``,
+and all of the tasks that have a stamped header ``header_B`` with values ``value_2`` or ``value_3``.
+
+**CLI Example**
+
+..
code-block:: console + + $ celery -A proj control revoke_by_stamped_header stamped_header_key_A=stamped_header_value_1 stamped_header_key_B=stamped_header_value_2 + + $ celery -A proj control revoke_by_stamped_header stamped_header_key_A=stamped_header_value_1 stamped_header_key_B=stamped_header_value_2 --terminate + + $ celery -A proj control revoke_by_stamped_header stamped_header_key_A=stamped_header_value_1 stamped_header_key_B=stamped_header_value_2 --terminate --signal=SIGKILL + .. _worker-time-limits: Time Limits diff --git a/requirements/test.txt b/requirements/test.txt index 9fde7200688..1b4a57ab118 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -3,6 +3,7 @@ pytest-celery==0.0.0 pytest-subtests==0.8.0 pytest-timeout~=2.1.0 pytest-click==1.1.0 +pytest-order==1.0.1 boto3>=1.9.178 moto>=2.2.6 # typing extensions diff --git a/t/integration/test_tasks.py b/t/integration/test_tasks.py index bfbaaab2723..b1da3da1029 100644 --- a/t/integration/test_tasks.py +++ b/t/integration/test_tasks.py @@ -1,10 +1,13 @@ from datetime import datetime, timedelta from time import perf_counter, sleep +from uuid import uuid4 import pytest import celery -from celery import group +from celery import chain, chord, group +from celery.canvas import StampingVisitor +from celery.worker import state as worker_state from .conftest import get_active_redis_channels from .tasks import (ClassBasedAutoRetryTask, ExpectedException, add, add_ignore_result, add_not_typed, fail, @@ -195,6 +198,87 @@ def test_revoked(self, manager): assert result.failed() is False assert result.successful() is False + def test_revoked_by_headers_simple_canvas(self, manager): + """Testing revoking of task using a stamped header""" + target_monitoring_id = uuid4().hex + + class MonitoringIdStampingVisitor(StampingVisitor): + def on_signature(self, sig, **headers) -> dict: + return {'monitoring_id': target_monitoring_id, 'stamped_headers': ['monitoring_id']} + + for monitoring_id in [target_monitoring_id, uuid4().hex, 4242, None]: + stamped_task = add.si(1, 1) + stamped_task.stamp(visitor=MonitoringIdStampingVisitor()) + result = stamped_task.freeze() + result.revoke_by_stamped_headers(headers={'monitoring_id': [monitoring_id]}) + stamped_task.apply_async() + if monitoring_id == target_monitoring_id: + with pytest.raises(celery.exceptions.TaskRevokedError): + result.get() + assert result.status == 'REVOKED' + assert result.ready() is True + assert result.failed() is False + assert result.successful() is False + else: + assert result.get() == 2 + assert result.status == 'SUCCESS' + assert result.ready() is True + assert result.failed() is False + assert result.successful() is True + worker_state.revoked_headers.clear() + + # This test leaves the environment dirty, + # so we let it run last in the suite to avoid + # affecting other tests until we can fix it. 
+ @pytest.mark.order("last") + @pytest.mark.parametrize('monitoring_id', [ + "4242", + [1234, uuid4().hex], + ]) + def test_revoked_by_headers_complex_canvas(self, manager, subtests, monitoring_id): + """Testing revoking of task using a stamped header""" + try: + manager.app.backend.ensure_chords_allowed() + except NotImplementedError as e: + raise pytest.skip(e.args[0]) + + target_monitoring_id = isinstance(monitoring_id, list) and monitoring_id[0] or monitoring_id + + class MonitoringIdStampingVisitor(StampingVisitor): + def on_signature(self, sig, **headers) -> dict: + return {'monitoring_id': target_monitoring_id, 'stamped_headers': ['monitoring_id']} + + stamped_task = sleeping.si(4) + stamped_task.stamp(visitor=MonitoringIdStampingVisitor()) + result = stamped_task.freeze() + + canvas = [ + group([stamped_task]), + chord(group([stamped_task]), sleeping.si(2)), + chord(group([sleeping.si(2)]), stamped_task), + chain(stamped_task), + group([sleeping.si(2), stamped_task, sleeping.si(2)]), + chord([sleeping.si(2), stamped_task], sleeping.si(2)), + chord([sleeping.si(2), sleeping.si(2)], stamped_task), + chain(sleeping.si(2), stamped_task), + chain(sleeping.si(2), group([sleeping.si(2), stamped_task, sleeping.si(2)])), + chain(sleeping.si(2), group([sleeping.si(2), stamped_task]), sleeping.si(2)), + chain(sleeping.si(2), group([sleeping.si(2), sleeping.si(2)]), stamped_task), + ] + + result.revoke_by_stamped_headers(headers={'monitoring_id': monitoring_id}) + + for sig in canvas: + sig_result = sig.apply_async() + with subtests.test(msg='Testing if task was revoked'): + with pytest.raises(celery.exceptions.TaskRevokedError): + sig_result.get() + assert result.status == 'REVOKED' + assert result.ready() is True + assert result.failed() is False + assert result.successful() is False + worker_state.revoked_headers.clear() + @flaky def test_wrong_arguments(self, manager): """Tests that proper exceptions are raised when task is called with wrong arguments.""" diff --git a/t/unit/app/test_control.py b/t/unit/app/test_control.py index 37fa3e8b2ae..eb6a761e837 100644 --- a/t/unit/app/test_control.py +++ b/t/unit/app/test_control.py @@ -424,6 +424,16 @@ def test_revoke(self): terminate=False, ) + def test_revoke_by_stamped_headers(self): + self.app.control.revoke_by_stamped_headers({'foo': 'bar'}) + self.assert_control_called_with_args( + 'revoke_by_stamped_headers', + destination=None, + headers={'foo': 'bar'}, + signal=control.TERM_SIGNAME, + terminate=False, + ) + def test_revoke__with_options(self): self.app.control.revoke( 'foozbaaz', @@ -441,6 +451,23 @@ def test_revoke__with_options(self): _options={'limit': 404}, ) + def test_revoke_by_stamped_headers__with_options(self): + self.app.control.revoke_by_stamped_headers( + {'foo': 'bar'}, + destination='a@q.com', + terminate=True, + signal='KILL', + limit=404, + ) + self.assert_control_called_with_args( + 'revoke_by_stamped_headers', + destination='a@q.com', + headers={'foo': 'bar'}, + signal='KILL', + terminate=True, + _options={'limit': 404}, + ) + def test_election(self): self.app.control.election('some_id', 'topic', 'action') self.assert_control_called_with_args( @@ -499,6 +526,14 @@ def test_revoke_from_result(self): connection=None, reply=False, signal=None, terminate=False, timeout=None) + def test_revoke_by_stamped_headers_from_result(self): + self.app.control.revoke_by_stamped_headers = Mock(name='revoke_by_stamped_headers') + self.app.AsyncResult('foozbazzbar').revoke_by_stamped_headers({'foo': 'bar'}) + 
self.app.control.revoke_by_stamped_headers.assert_called_with( + {'foo': 'bar'}, + connection=None, reply=False, signal=None, + terminate=False, timeout=None) + def test_revoke_from_resultset(self): self.app.control.revoke = Mock(name='revoke') uuids = [uuid() for _ in range(10)] diff --git a/t/unit/worker/test_control.py b/t/unit/worker/test_control.py index 0d53d65e3bc..33cc521cb5c 100644 --- a/t/unit/worker/test_control.py +++ b/t/unit/worker/test_control.py @@ -17,7 +17,7 @@ from celery.worker import state as worker_state from celery.worker.pidbox import Pidbox, gPidbox from celery.worker.request import Request -from celery.worker.state import REVOKE_EXPIRES, revoked +from celery.worker.state import REVOKE_EXPIRES, revoked, revoked_headers hostname = socket.gethostname() @@ -544,6 +544,24 @@ def test_revoke_terminate(self): finally: worker_state.task_ready(request) + def test_revoke_by_stamped_headers_terminate(self): + request = Mock() + request.id = uuid() + request.options = stamped_header = {'stamp': 'foo'} + request.options['stamped_headers'] = ['stamp'] + state = self.create_state() + state.consumer = Mock() + worker_state.task_reserved(request) + try: + r = control.revoke_by_stamped_headers(state, stamped_header, terminate=True) + assert stamped_header == revoked_headers + assert 'terminate:' in r['ok'] + # unknown task id only revokes + r = control.revoke_by_stamped_headers(state, stamped_header, terminate=True) + assert 'tasks unknown' in r['ok'] + finally: + worker_state.task_ready(request) + def test_autoscale(self): self.panel.state.consumer = Mock() self.panel.state.consumer.controller = Mock() diff --git a/t/unit/worker/test_state.py b/t/unit/worker/test_state.py index 7388c49bb9f..bdff94facbf 100644 --- a/t/unit/worker/test_state.py +++ b/t/unit/worker/test_state.py @@ -19,6 +19,7 @@ def reset_state(): yield state.active_requests.clear() state.revoked.clear() + state.revoked_headers.clear() state.total_count.clear() From dde1040fafe59fd8a92f3352f451216dd1a2a908 Mon Sep 17 00:00:00 2001 From: AJ Jordan Date: Tue, 25 Oct 2022 14:41:50 -0400 Subject: [PATCH 0222/1051] Clarify wording in Redis priority docs --- docs/userguide/routing.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/userguide/routing.rst b/docs/userguide/routing.rst index 1dbac6807cf..a5d58755427 100644 --- a/docs/userguide/routing.rst +++ b/docs/userguide/routing.rst @@ -304,8 +304,8 @@ The config above will give you these queue names: That said, note that this will never be as good as priorities implemented at the -server level, and may be approximate at best. But it may still be good enough -for your application. +broker server level, and may be approximate at best. But it may still be good +enough for your application. AMQP Primer From 914efb03c8368d7d53be2d45518f3188c3312cba Mon Sep 17 00:00:00 2001 From: root Date: Fri, 28 Oct 2022 11:17:06 +0800 Subject: [PATCH 0223/1051] Fix non working example of using celery_worker pytest fixture --- docs/userguide/testing.rst | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/userguide/testing.rst b/docs/userguide/testing.rst index dcf9cdc35b2..4c83e350ffc 100644 --- a/docs/userguide/testing.rst +++ b/docs/userguide/testing.rst @@ -160,7 +160,8 @@ Example: @celery_app.task def mul(x, y): return x * y - + + celery_worker.reload() assert mul.delay(4, 4).get(timeout=10) == 16 ``celery_worker`` - Embed live worker. 
From 53dd65e3275eac017070f350ace9fc2326c0a8d0 Mon Sep 17 00:00:00 2001
From: Tomer Nosrati
Date: Sun, 30 Oct 2022 12:41:36 +0200
Subject: [PATCH 0224/1051] StampingVisitor `on_signature()` required returning
 a key with the list of stamped header keys. It will now implicitly assume all
 given keys are the stamped header keys, if not overridden by an explicit
 "stamped_headers" key in the returned value (like it required before this
 patch)

---
 celery/canvas.py | 5 ++++-
 docs/userguide/canvas.rst | 17 +++++++++++++--
 t/integration/test_tasks.py | 2 +-
 t/unit/tasks/test_canvas.py | 43 +++++++++++++++++++++++++++++++++++--
 4 files changed, 61 insertions(+), 6 deletions(-)

diff --git a/celery/canvas.py b/celery/canvas.py
index add9482b0fb..c1e59e54a5b 100644
--- a/celery/canvas.py
+++ b/celery/canvas.py
@@ -501,7 +501,10 @@ def stamp(self, visitor=None, **headers):
         """
         headers = headers.copy()
         if visitor is not None:
-            headers.update(visitor.on_signature(self, **headers))
+            visitor_headers = visitor.on_signature(self, **headers)
+            if "stamped_headers" not in visitor_headers:
+                visitor_headers["stamped_headers"] = list(visitor_headers.keys())
+            headers.update(visitor_headers)
         else:
             headers["stamped_headers"] = [header for header in headers.keys() if header not in self.options]
             _merge_dictionaries(headers, self.options)
diff --git a/docs/userguide/canvas.rst b/docs/userguide/canvas.rst
index b8db4c315b6..29046839f34 100644
--- a/docs/userguide/canvas.rst
+++ b/docs/userguide/canvas.rst
@@ -1232,9 +1232,22 @@ the external monitoring system.
 
     class MonitoringIdStampingVisitor(StampingVisitor):
         def on_signature(self, sig, **headers) -> dict:
-            return {'monitoring_id': uuid4(), 'stamped_headers': ['monitoring_id']}
+            return {'monitoring_id': uuid4().hex}
 
-Next, lets see how to use the ``MonitoringIdStampingVisitor`` stamping visitor.
+.. note::
+
+    The ``stamped_headers`` key returned in ``on_signature`` is used to specify the headers that will be
+    stamped on the task. If this key is not specified, the stamping visitor will assume all keys in the
+    returned dictionary are the stamped headers from the visitor.
+    This means the following code block will result in the same behavior as the previous example.
+
+.. code-block:: python
+
+    class MonitoringIdStampingVisitor(StampingVisitor):
+        def on_signature(self, sig, **headers) -> dict:
+            return {'monitoring_id': uuid4().hex, 'stamped_headers': ['monitoring_id']}
+
+Next, let's see how to use the ``MonitoringIdStampingVisitor`` example stamping visitor.
 
..
code-block:: python diff --git a/t/integration/test_tasks.py b/t/integration/test_tasks.py index b1da3da1029..ee131e02622 100644 --- a/t/integration/test_tasks.py +++ b/t/integration/test_tasks.py @@ -204,7 +204,7 @@ def test_revoked_by_headers_simple_canvas(self, manager): class MonitoringIdStampingVisitor(StampingVisitor): def on_signature(self, sig, **headers) -> dict: - return {'monitoring_id': target_monitoring_id, 'stamped_headers': ['monitoring_id']} + return {'monitoring_id': target_monitoring_id} for monitoring_id in [target_monitoring_id, uuid4().hex, 4242, None]: stamped_task = add.si(1, 1) diff --git a/t/unit/tasks/test_canvas.py b/t/unit/tasks/test_canvas.py index f4428a6c424..7ec18f5ea78 100644 --- a/t/unit/tasks/test_canvas.py +++ b/t/unit/tasks/test_canvas.py @@ -6,8 +6,8 @@ import pytest_subtests # noqa: F401 from celery._state import _task_stack -from celery.canvas import (Signature, _chain, _maybe_group, chain, chord, chunks, group, maybe_signature, - maybe_unroll_group, signature, xmap, xstarmap) +from celery.canvas import (Signature, StampingVisitor, _chain, _maybe_group, chain, chord, chunks, group, + maybe_signature, maybe_unroll_group, signature, xmap, xstarmap) from celery.result import AsyncResult, EagerResult, GroupResult SIG = Signature({ @@ -190,6 +190,45 @@ def test_manual_stamping(self): sig_1.apply() assert sorted(sig_1_res._get_task_meta()['groups']) == sorted(stamps) + def test_custom_stamping_visitor(self, subtests): + """ + Test manual signature stamping with a custom visitor class. + """ + self.app.conf.task_always_eager = True + self.app.conf.task_store_eager_result = True + self.app.conf.result_extended = True + + class CustomStampingVisitor1(StampingVisitor): + def on_signature(self, sig, **headers) -> dict: + # without using stamped_headers key explicitly + # the key will be calculated from the headers implicitly + return {'header': 'value'} + + class CustomStampingVisitor2(StampingVisitor): + def on_signature(self, sig, **headers) -> dict: + return {'header': 'value', 'stamped_headers': ['header']} + + sig_1 = self.add.s(2, 2) + sig_1.stamp(visitor=CustomStampingVisitor1()) + sig_1_res = sig_1.freeze() + sig_1.apply() + sig_2 = self.add.s(2, 2) + sig_2.stamp(visitor=CustomStampingVisitor2()) + sig_2_res = sig_2.freeze() + sig_2.apply() + + with subtests.test("sig_1 is stamped with custom visitor", stamped_headers=["header", "groups"]): + assert sorted(sig_1_res._get_task_meta()["stamped_headers"]) == sorted(["header", "groups"]) + + with subtests.test("sig_2 is stamped with custom visitor", stamped_headers=["header", "groups"]): + assert sorted(sig_2_res._get_task_meta()["stamped_headers"]) == sorted(["header", "groups"]) + + with subtests.test("sig_1 is stamped with custom visitor", header=["value"]): + assert sig_1_res._get_task_meta()["header"] == ["value"] + + with subtests.test("sig_2 is stamped with custom visitor", header=["value"]): + assert sig_2_res._get_task_meta()["header"] == ["value"] + def test_getitem_property_class(self): assert Signature.task assert Signature.args From 0034a7b496f1892c336f771c1fcaf6a8be14c573 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sondre=20Lilleb=C3=B8=20Gundersen?= Date: Sun, 30 Oct 2022 13:39:24 +0100 Subject: [PATCH 0225/1051] Update serializer docs (#7858) * Update serializer docs The msgpack disclaimer no longer applies, and the docs now link to the accept_content setting * Remove simplejson mention --- docs/userguide/calling.rst | 25 ++++++++++++++++++------- 1 file changed, 18 insertions(+), 7 
deletions(-)

diff --git a/docs/userguide/calling.rst b/docs/userguide/calling.rst
index 30894849098..038a43dce18 100644
--- a/docs/userguide/calling.rst
+++ b/docs/userguide/calling.rst
@@ -453,8 +453,7 @@ them into the Kombu serializer registry
 Each option has its advantages and disadvantages.
 
 json -- JSON is supported in many programming languages, is now
-    a standard part of Python (since 2.6), and is fairly fast to decode
-    using the modern Python libraries, such as :pypi:`simplejson`.
+    a standard part of Python (since 2.6), and is fairly fast to decode.
 
     The primary disadvantage to JSON is that it limits you to the
     following data types: strings, Unicode, floats, Boolean,
     dictionaries, and lists.
@@ -498,17 +497,29 @@ yaml -- YAML has many of the same characteristics as json,
     If you need a more expressive set of data types and need to maintain
     cross-language compatibility, then YAML may be a better fit than
     the above.
 
+    To use it, install Celery with:
+
+    .. code-block:: console
+
+        $ pip install celery[yaml]
+
     See http://yaml.org/ for more information.
 
 msgpack -- msgpack is a binary serialization format that's closer to JSON
-    in features. It's very young however, and support should be considered
-    experimental at this point.
+    in features. The format compresses better, so it is faster to parse
+    and encode than JSON.
+
+    To use it, install Celery with:
+
+    .. code-block:: console
+
+        $ pip install celery[msgpack]
 
     See http://msgpack.org/ for more information.
 
-The encoding used is available as a message header, so the worker knows how to
-deserialize any task. If you use a custom serializer, this serializer must
-be available for the worker.
+To use a custom serializer you need to add the content type to
+:setting:`accept_content`. By default, only JSON is accepted,
+and tasks containing other content headers are rejected.
 
 The following order is used to decide the serializer
 used when sending a task:

From fd5e65f170c5dc85ffb68def11937ac01a390bff Mon Sep 17 00:00:00 2001
From: Marcelo Trylesinski
Date: Tue, 18 Oct 2022 20:48:33 +0200
Subject: [PATCH 0226/1051] Remove reference to old Python version

---
 requirements/README.rst            | 4 ++--
 t/unit/utils/test_serialization.py | 5 +----
 2 files changed, 3 insertions(+), 6 deletions(-)

diff --git a/requirements/README.rst b/requirements/README.rst
index 8224e322d6d..890bb189a68 100644
--- a/requirements/README.rst
+++ b/requirements/README.rst
@@ -8,7 +8,7 @@ Index
 * :file:`requirements/default.txt`
 
-    Default requirements for Python 2.7+.
+    Default requirements for Python 3.7+.
 
 * :file:`requirements/jython.txt`
 
@@ -29,7 +29,7 @@ Index
 * :file:`requirements/test-ci-default.txt`
 
-    Extra test requirements required for Python 2.7 by the CI suite (Tox).
+    Extra test requirements required for Python 3.7 by the CI suite (Tox).
* :file:`requirements/test-integration.txt` diff --git a/t/unit/utils/test_serialization.py b/t/unit/utils/test_serialization.py index 1a4ca3b9d3a..b5617ed2bfb 100644 --- a/t/unit/utils/test_serialization.py +++ b/t/unit/utils/test_serialization.py @@ -96,10 +96,7 @@ def test_default_table(self, s, b): assert strtobool(s) == b def test_unknown_value(self): - with pytest.raises(TypeError, - # todo replace below when dropping python 2.7 - # match="Cannot coerce 'foo' to type bool"): - match=r"Cannot coerce u?'foo' to type bool"): + with pytest.raises(TypeError, match="Cannot coerce 'foo' to type bool"): strtobool('foo') def test_no_op(self): From 6c09495d22460d3eb6fad159ff5c31e529017548 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sun, 30 Oct 2022 14:29:02 +0200 Subject: [PATCH 0227/1051] Added on_replace() to Task to allow manipulating the replaced sig with custom changes at the end of the task.replace() --- celery/app/task.py | 25 +++++++++++++++++++------ t/unit/tasks/test_tasks.py | 19 +++++++++++++++++++ 2 files changed, 38 insertions(+), 6 deletions(-) diff --git a/celery/app/task.py b/celery/app/task.py index e3c0fcf0ac1..22794fd16de 100644 --- a/celery/app/task.py +++ b/celery/app/task.py @@ -958,12 +958,7 @@ def replace(self, sig): groups = self.request.stamps.get("groups") sig.stamp(visitor=GroupStampingVisitor(groups=groups, stamped_headers=stamped_headers)) - # Finally, either apply or delay the new signature! - if self.request.is_eager: - return sig.apply().get() - else: - sig.delay() - raise Ignore('Replaced by new task') + return self.on_replace(sig) def add_to_chord(self, sig, lazy=False): """Add signature to the chord the current task is a member of. @@ -1079,6 +1074,24 @@ def after_return(self, status, retval, task_id, args, kwargs, einfo): None: The return value of this handler is ignored. """ + def on_replace(self, sig): + """Handler called when the task is replaced. + + Must return super().on_replace(sig) when overriding to ensure the task replacement + is properly handled. + + .. versionadded:: 5.3 + + Arguments: + sig (Signature): signature to replace with. + """ + # Finally, either apply or delay the new signature! 
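+        # Eager requests run the replacement signature inline and return
+        # its result directly; normal requests publish the signature to the
+        # broker and raise Ignore so the current task stores no result of
+        # its own.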
+ if self.request.is_eager: + return sig.apply().get() + else: + sig.delay() + raise Ignore('Replaced by new task') + def add_trail(self, result): if self.trail: self.request.children.append(result) diff --git a/t/unit/tasks/test_tasks.py b/t/unit/tasks/test_tasks.py index 2300d423976..2a5f08d6c4f 100644 --- a/t/unit/tasks/test_tasks.py +++ b/t/unit/tasks/test_tasks.py @@ -9,6 +9,7 @@ from celery import Task, group, uuid from celery.app.task import _reprtask +from celery.canvas import StampingVisitor, signature from celery.contrib.testing.mocks import ContextMock from celery.exceptions import Ignore, ImproperlyConfigured, Retry from celery.result import AsyncResult, EagerResult @@ -1059,6 +1060,24 @@ def test_send_event(self): 'task-foo', uuid='fb', id=3122, retry=True, retry_policy=self.app.conf.task_publish_retry_policy) + @pytest.mark.usefixtures('depends_on_current_app') + def test_on_replace(self): + class CustomStampingVisitor(StampingVisitor): + def on_signature(self, sig, **headers) -> dict: + return {'header': 'value'} + + class MyTask(Task): + def on_replace(self, sig): + sig.stamp(CustomStampingVisitor()) + return super().on_replace(sig) + + mytask = self.app.task(shared=False, base=MyTask)(return_True) + + sig1 = signature('sig1') + with pytest.raises(Ignore): + mytask.replace(sig1) + assert sig1.options['header'] == 'value' + def test_replace(self): sig1 = MagicMock(name='sig1') sig1.options = {} From be1d3c086d5059f9ac261744909b8c624a9b0983 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 31 Oct 2022 17:04:02 +0000 Subject: [PATCH 0228/1051] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/asottile/pyupgrade: v3.1.0 → v3.2.0](https://github.com/asottile/pyupgrade/compare/v3.1.0...v3.2.0) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b127d2a3097..f91e4309713 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/asottile/pyupgrade - rev: v3.1.0 + rev: v3.2.0 hooks: - id: pyupgrade args: ["--py37-plus"] From 720d1928c4b583f36ca0cce7607b616466f2ffbb Mon Sep 17 00:00:00 2001 From: Hank Ehly Date: Wed, 2 Nov 2022 12:36:58 -0500 Subject: [PATCH 0229/1051] Add clarifying information to completed_count documentation (#7873) * Add clarifying information to completed_count docstring * Update canvas documentation --- celery/result.py | 5 ++++- docs/userguide/canvas.rst | 4 +++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/celery/result.py b/celery/result.py index 3dcd02523ee..eb3e154933b 100644 --- a/celery/result.py +++ b/celery/result.py @@ -651,8 +651,11 @@ def ready(self): def completed_count(self): """Task completion count. + Note that `complete` means `successful` in this context. In other words, the + return value of this method is the number of ``successful`` tasks. + Returns: - int: the number of tasks completed. + int: the number of complete (i.e. successful) tasks. 
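+
+        Example (a minimal sketch, assuming an ``add`` task and a result
+        backend are configured):
+
+            >>> res = group(add.s(i, i) for i in range(3)).apply_async()
+            >>> res.get(timeout=10)
+            [0, 2, 4]
+            >>> res.completed_count()
+            3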
""" return sum(int(result.successful()) for result in self.results) diff --git a/docs/userguide/canvas.rst b/docs/userguide/canvas.rst index 29046839f34..863c9a81c71 100644 --- a/docs/userguide/canvas.rst +++ b/docs/userguide/canvas.rst @@ -797,7 +797,9 @@ It supports the following operations: * :meth:`~celery.result.GroupResult.completed_count` - Return the number of completed subtasks. + Return the number of completed subtasks. Note that `complete` means `successful` in + this context. In other words, the return value of this method is the number of + ``successful`` tasks. * :meth:`~celery.result.GroupResult.revoke` From f64b3371d0ed2e104db438a89e956d550ac98e86 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 3 Nov 2022 05:20:04 +0200 Subject: [PATCH 0230/1051] Stabilized test_revoked_by_headers_complex_canvas (#7877) --- t/integration/test_tasks.py | 86 ++++++++++++++++++++----------------- 1 file changed, 46 insertions(+), 40 deletions(-) diff --git a/t/integration/test_tasks.py b/t/integration/test_tasks.py index ee131e02622..f681da01b61 100644 --- a/t/integration/test_tasks.py +++ b/t/integration/test_tasks.py @@ -231,53 +231,59 @@ def on_signature(self, sig, **headers) -> dict: # so we let it run last in the suite to avoid # affecting other tests until we can fix it. @pytest.mark.order("last") - @pytest.mark.parametrize('monitoring_id', [ - "4242", - [1234, uuid4().hex], - ]) - def test_revoked_by_headers_complex_canvas(self, manager, subtests, monitoring_id): + @flaky + def test_revoked_by_headers_complex_canvas(self, manager, subtests): """Testing revoking of task using a stamped header""" try: manager.app.backend.ensure_chords_allowed() except NotImplementedError as e: raise pytest.skip(e.args[0]) - target_monitoring_id = isinstance(monitoring_id, list) and monitoring_id[0] or monitoring_id + for monitoring_id in ["4242", [1234, uuid4().hex]]: - class MonitoringIdStampingVisitor(StampingVisitor): - def on_signature(self, sig, **headers) -> dict: - return {'monitoring_id': target_monitoring_id, 'stamped_headers': ['monitoring_id']} - - stamped_task = sleeping.si(4) - stamped_task.stamp(visitor=MonitoringIdStampingVisitor()) - result = stamped_task.freeze() - - canvas = [ - group([stamped_task]), - chord(group([stamped_task]), sleeping.si(2)), - chord(group([sleeping.si(2)]), stamped_task), - chain(stamped_task), - group([sleeping.si(2), stamped_task, sleeping.si(2)]), - chord([sleeping.si(2), stamped_task], sleeping.si(2)), - chord([sleeping.si(2), sleeping.si(2)], stamped_task), - chain(sleeping.si(2), stamped_task), - chain(sleeping.si(2), group([sleeping.si(2), stamped_task, sleeping.si(2)])), - chain(sleeping.si(2), group([sleeping.si(2), stamped_task]), sleeping.si(2)), - chain(sleeping.si(2), group([sleeping.si(2), sleeping.si(2)]), stamped_task), - ] - - result.revoke_by_stamped_headers(headers={'monitoring_id': monitoring_id}) - - for sig in canvas: - sig_result = sig.apply_async() - with subtests.test(msg='Testing if task was revoked'): - with pytest.raises(celery.exceptions.TaskRevokedError): - sig_result.get() - assert result.status == 'REVOKED' - assert result.ready() is True - assert result.failed() is False - assert result.successful() is False - worker_state.revoked_headers.clear() + # Try to purge the queue before we start + # to attempt to avoid interference from other tests + while True: + count = manager.app.control.purge() + if count == 0: + break + + target_monitoring_id = isinstance(monitoring_id, list) and monitoring_id[0] or monitoring_id + + class 
MonitoringIdStampingVisitor(StampingVisitor): + def on_signature(self, sig, **headers) -> dict: + return {'monitoring_id': target_monitoring_id, 'stamped_headers': ['monitoring_id']} + + stamped_task = sleeping.si(4) + stamped_task.stamp(visitor=MonitoringIdStampingVisitor()) + result = stamped_task.freeze() + + canvas = [ + group([stamped_task]), + chord(group([stamped_task]), sleeping.si(2)), + chord(group([sleeping.si(2)]), stamped_task), + chain(stamped_task), + group([sleeping.si(2), stamped_task, sleeping.si(2)]), + chord([sleeping.si(2), stamped_task], sleeping.si(2)), + chord([sleeping.si(2), sleeping.si(2)], stamped_task), + chain(sleeping.si(2), stamped_task), + chain(sleeping.si(2), group([sleeping.si(2), stamped_task, sleeping.si(2)])), + chain(sleeping.si(2), group([sleeping.si(2), stamped_task]), sleeping.si(2)), + chain(sleeping.si(2), group([sleeping.si(2), sleeping.si(2)]), stamped_task), + ] + + result.revoke_by_stamped_headers(headers={'monitoring_id': monitoring_id}) + + for sig in canvas: + sig_result = sig.apply_async() + with subtests.test(msg='Testing if task was revoked'): + with pytest.raises(celery.exceptions.TaskRevokedError): + sig_result.get() + assert result.status == 'REVOKED' + assert result.ready() is True + assert result.failed() is False + assert result.successful() is False + worker_state.revoked_headers.clear() @flaky def test_wrong_arguments(self, manager): From 9859a5e8ab4d3a40056bd2b09a8c3bd88f1be4f8 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 3 Nov 2022 11:58:46 +0200 Subject: [PATCH 0231/1051] Enhanced `StampingVisitor` with `on_callback()` and `on_errback()`, that will (#7867) be used in a new `Signature.stamp_links()` to apply the visitor's stamping on the signature's callbacks and errbacks (if exists), per the implementation of these methods in the custom visitor class --- celery/canvas.py | 73 +++++++++++++++++++++++++++++++--- docs/userguide/canvas.rst | 78 +++++++++++++++++++++++++++++++++++-- t/unit/tasks/test_canvas.py | 12 ++++-- 3 files changed, 150 insertions(+), 13 deletions(-) diff --git a/celery/canvas.py b/celery/canvas.py index c1e59e54a5b..30cc58a83e4 100644 --- a/celery/canvas.py +++ b/celery/canvas.py @@ -176,6 +176,28 @@ def on_chord_body(self, chord, **header) -> dict: """ return {} + def on_callback(self, callback, **header) -> dict: + """Method that is called on callback stamping. + + Arguments: + callback (Signature): callback that is stamped. + headers (Dict): Partial headers that could be merged with existing headers. + Returns: + Dict: headers to update. + """ + return {} + + def on_errback(self, errback, **header) -> dict: + """Method that is called on errback stamping. + + Arguments: + errback (Signature): errback that is stamped. + headers (Dict): Partial headers that could be merged with existing headers. + Returns: + Dict: headers to update. + """ + return {} + class GroupStampingVisitor(StampingVisitor): """ @@ -499,21 +521,58 @@ def stamp(self, visitor=None, **headers): visitor (StampingVisitor): Visitor API object. headers (Dict): Stamps that should be added to headers. 
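+
+        Example (an illustrative sketch; ``add`` is assumed to be a task):
+
+            >>> sig = add.s(2, 2)
+            >>> sig = sig.stamp(stamp='value')
+            >>> sig.options['stamp']
+            'value'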
""" + self.stamp_links(visitor, **headers) + headers = headers.copy() if visitor is not None: visitor_headers = visitor.on_signature(self, **headers) if "stamped_headers" not in visitor_headers: visitor_headers["stamped_headers"] = list(visitor_headers.keys()) - headers.update(visitor_headers) + _merge_dictionaries(headers, visitor_headers) else: headers["stamped_headers"] = [header for header in headers.keys() if header not in self.options] _merge_dictionaries(headers, self.options) + # Preserve previous stamped headers stamped_headers = set(self.options.get("stamped_headers", [])) stamped_headers.update(headers["stamped_headers"]) headers["stamped_headers"] = list(stamped_headers) return self.set(**headers) + def stamp_links(self, visitor, **headers): + """Stamp this signature links (callbacks and errbacks). + Using a visitor will pass on responsibility for the stamping + to the visitor. + + Arguments: + visitor (StampingVisitor): Visitor API object. + headers (Dict): Stamps that should be added to headers. + """ + if not visitor: + return + + non_visitor_headers = headers.copy() + + # Stamp all of the callbacks of this signature + headers = non_visitor_headers.copy() + for link in self.options.get('link', []) or []: + visitor_headers = visitor.on_callback(link, **headers) + if visitor_headers and "stamped_headers" not in visitor_headers: + visitor_headers["stamped_headers"] = list(visitor_headers.keys()) + headers.update(visitor_headers or {}) + link = maybe_signature(link, app=self.app) + link.stamp(visitor=visitor, **headers) + + # Stamp all of the errbacks of this signature + headers = non_visitor_headers.copy() + for link in self.options.get('link_error', []) or []: + visitor_headers = visitor.on_errback(link, **headers) + if visitor_headers and "stamped_headers" not in visitor_headers: + visitor_headers["stamped_headers"] = list(visitor_headers.keys()) + headers.update(visitor_headers or {}) + link = maybe_signature(link, app=self.app) + link.stamp(visitor=visitor, **headers) + def _with_list_option(self, key): items = self.options.setdefault(key, []) if not isinstance(items, MutableSequence): @@ -842,11 +901,13 @@ def run(self, args=None, kwargs=None, group_id=None, chord=None, groups = self.options.get("groups") stamped_headers = self.options.get("stamped_headers") - self.stamp(visitor=GroupStampingVisitor(groups=groups, stamped_headers=stamped_headers)) + visitor = GroupStampingVisitor(groups=groups, stamped_headers=stamped_headers) + self.stamp(visitor=visitor) if results_from_prepare: if link: tasks[0].extend_list_option('link', link) + tasks[0].stamp_links(visitor=visitor) first_task = tasks.pop() options = _prepare_chain_from_options(options, tasks, use_link) @@ -1660,14 +1721,14 @@ def freeze(self, _id=None, group_id=None, chord=None, return body_result def stamp(self, visitor=None, **headers): - if visitor is not None: - headers.update(visitor.on_chord_header_start(self, **headers)) - super().stamp(visitor=visitor, **headers) - tasks = self.tasks if isinstance(tasks, group): tasks = tasks.tasks + if visitor is not None: + headers.update(visitor.on_chord_header_start(self, **headers)) + super().stamp(visitor=visitor, **headers) + if isinstance(tasks, _regen): tasks.map(_partial(_stamp_regen_task, visitor=visitor, **headers)) else: diff --git a/docs/userguide/canvas.rst b/docs/userguide/canvas.rst index 863c9a81c71..14f7d5f6e9d 100644 --- a/docs/userguide/canvas.rst +++ b/docs/userguide/canvas.rst @@ -1159,7 +1159,7 @@ For example, >>> sig1_res = sig1.freeze() >>> g = 
group(sig1, add.si(3, 3)) >>> g.stamp(stamp='your_custom_stamp') - >>> res = g1.apply_async() + >>> res = g.apply_async() >>> res.get(timeout=TIMEOUT) [4, 6] >>> sig1_res._get_task_meta()['stamp'] @@ -1228,7 +1228,7 @@ The following example shows another custom stamping visitor, which labels all tasks with a custom ``monitoring_id`` which can represent a UUID value of an external monitoring system, that can be used to track the task execution by including the id with such a visitor implementation. This ``monitoring_id`` can be a randomly generated UUID, or a unique identifier of the span id used by -the external monitoring system. +the external monitoring system, etc. .. code-block:: python @@ -1265,4 +1265,76 @@ Next, lets see how to use the ``MonitoringIdStampingVisitor`` example stamping v chain_example = chain(signature('t1'), group(signature('t2'), signature('t3')), signature('t4')) chain_example.stamp(visitor=MonitoringIdStampingVisitor()) -Lastly, it's important to mention that each monitoring id stamp in the example above would be different from each other between tasks. \ No newline at end of file +Lastly, it's important to mention that each monitoring id stamp in the example above would be different from each other between tasks. + +Callbacks stamping +------------------ + +The stamping API also supports stamping callbacks implicitly. +This means that when a callback is added to a task, the stamping +visitor will be applied to the callback as well. + +.. warning:: + + The callback must be linked to the signature before stamping. + +For example, lets examine the following custome stamping visitor. + +.. code-block:: python + + class CustomStampingVisitor(StampingVisitor): + def on_signature(self, sig, **headers) -> dict: + return {'header': 'value'} + + def on_callback(self, callback, **header) -> dict: + return {'on_callback': True} + + def on_errback(self, errback, **header) -> dict: + return {'on_errback': True} + +This custom stamping visitor will stamp the signature, callbacks, and errbacks with ``{'header': 'value'}`` +and stamp the callbacks and errbacks with ``{'on_callback': True}`` and ``{'on_errback': True}`` respectively as shown below. + +.. code-block:: python + + c = chord([add.s(1, 1), add.s(2, 2)], xsum.s()) + callback = signature('sig_link') + errback = signature('sig_link_error') + c.link(callback) + c.link_error(errback) + c.stamp(visitor=CustomStampingVisitor()) + +This example will result in the following stamps: + +.. code-block:: python + + >>> c.options + {'header': 'value', 'stamped_headers': ['header']} + >>> c.tasks.tasks[0].options + {'header': 'value', 'stamped_headers': ['header']} + >>> c.tasks.tasks[1].options + {'header': 'value', 'stamped_headers': ['header']} + >>> c.body.options + {'header': 'value', 'stamped_headers': ['header']} + >>> c.body.options['link'][0].options + {'header': 'value', 'on_callback': True, 'stamped_headers': ['header', 'on_callback']} + >>> c.body.options['link_error'][0].options + {'header': 'value', 'on_errback': True, 'stamped_headers': ['header', 'on_errback']} + +When calling ``apply_async()`` on ``c``, the group stamping will be applied on top of the above stamps. +This will result in the following stamps: + +.. 
code-block:: python + + >>> c.options + {'header': 'value', 'groups': ['1234'], 'stamped_headers': ['header', 'groups']} + >>> c.tasks.tasks[0].options + {'header': 'value', 'groups': ['1234'], 'stamped_headers': ['header', 'groups']} + >>> c.tasks.tasks[1].options + {'header': 'value', 'groups': ['1234'], 'stamped_headers': ['header', 'groups']} + >>> c.body.options + {'header': 'value', 'groups': [], 'stamped_headers': ['header', 'groups']} + >>> c.body.options['link'][0].options + {'header': 'value', 'on_callback': True, 'groups': [], 'stamped_headers': ['header', 'on_callback', 'groups']} + >>> c.body.options['link_error'][0].options + {'header': 'value', 'on_errback': True, 'groups': [], 'stamped_headers': ['header', 'on_errback', 'groups']} \ No newline at end of file diff --git a/t/unit/tasks/test_canvas.py b/t/unit/tasks/test_canvas.py index 7ec18f5ea78..08ed8a2f9a5 100644 --- a/t/unit/tasks/test_canvas.py +++ b/t/unit/tasks/test_canvas.py @@ -6,8 +6,8 @@ import pytest_subtests # noqa: F401 from celery._state import _task_stack -from celery.canvas import (Signature, StampingVisitor, _chain, _maybe_group, chain, chord, chunks, group, - maybe_signature, maybe_unroll_group, signature, xmap, xstarmap) +from celery.canvas import (GroupStampingVisitor, Signature, StampingVisitor, _chain, _maybe_group, chain, chord, + chunks, group, maybe_signature, maybe_unroll_group, signature, xmap, xstarmap) from celery.result import AsyncResult, EagerResult, GroupResult SIG = Signature({ @@ -636,11 +636,15 @@ def s(*args, **kwargs): assert c.tasks[-1].options['chord'] == 'some_chord_id' c.apply_async(link=[s(32)]) - assert c.tasks[-1].options['link'] == [s(32)] + expected_sig = s(32) + expected_sig.stamp(visitor=GroupStampingVisitor()) + assert c.tasks[-1].options['link'] == [expected_sig] c.apply_async(link_error=[s('error')]) + expected_sig = s('error') + expected_sig.stamp(visitor=GroupStampingVisitor()) for task in c.tasks: - assert task.options['link_error'] == [s('error')] + assert task.options['link_error'] == [expected_sig] def test_apply_options_none(self): class static(Signature): From d5a1776bb0106c064df9c3caf9e0888d61de78ff Mon Sep 17 00:00:00 2001 From: hankehly Date: Thu, 3 Nov 2022 06:01:20 -0500 Subject: [PATCH 0232/1051] Add -r flag to xargs commands --- Makefile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index 2ffdc12a340..4b64f228e5d 100644 --- a/Makefile +++ b/Makefile @@ -126,8 +126,8 @@ $(CONTRIBUTING): contrib: clean-contrib $(CONTRIBUTING) clean-pyc: - -find . -type f -a \( -name "*.pyc" -o -name "*$$py.class" \) | xargs rm - -find . -type d -name "__pycache__" | xargs rm -r + -find . -type f -a \( -name "*.pyc" -o -name "*$$py.class" \) | xargs -r rm + -find . 
-type d -name "__pycache__" | xargs -r rm -r removepyc: clean-pyc From ad9e5c714a57bb7c4146b25aeecc837404c2f6e1 Mon Sep 17 00:00:00 2001 From: Manuel Weitzman Date: Fri, 28 Oct 2022 13:34:00 -0300 Subject: [PATCH 0233/1051] Add --skip-checks flag to bypass django core checks --- celery/bin/celery.py | 10 +++++++++- celery/fixups/django.py | 3 ++- t/unit/fixups/test_django.py | 10 ++++++++++ 3 files changed, 21 insertions(+), 2 deletions(-) diff --git a/celery/bin/celery.py b/celery/bin/celery.py index 65f53f37390..dfe8c7f2d60 100644 --- a/celery/bin/celery.py +++ b/celery/bin/celery.py @@ -131,9 +131,15 @@ def convert(self, value, param, ctx): cls=CeleryOption, is_flag=True, help_group="Global Options") +@click.option('--skip-checks', + envvar='SKIP_CHECKS', + cls=CeleryOption, + is_flag=True, + help_group="Global Options", + help="Skip Django core checks on startup.") @click.pass_context def celery(ctx, app, broker, result_backend, loader, config, workdir, - no_color, quiet, version): + no_color, quiet, version, skip_checks): """Celery command entrypoint.""" if version: click.echo(VERSION_BANNER) @@ -151,6 +157,8 @@ def celery(ctx, app, broker, result_backend, loader, config, workdir, os.environ['CELERY_RESULT_BACKEND'] = result_backend if config: os.environ['CELERY_CONFIG_MODULE'] = config + if skip_checks: + os.environ['CELERY_SKIP_CHECKS'] = skip_checks ctx.obj = CLIContext(app=app, no_color=no_color, workdir=workdir, quiet=quiet) diff --git a/celery/fixups/django.py b/celery/fixups/django.py index 05a41663b96..473c3b676b4 100644 --- a/celery/fixups/django.py +++ b/celery/fixups/django.py @@ -133,7 +133,8 @@ def django_setup(self) -> None: def validate_models(self) -> None: from django.core.checks import run_checks self.django_setup() - run_checks() + if not os.environ.get('CELERY_SKIP_CHECKS'): + run_checks() def install(self) -> "DjangoWorkerFixup": signals.beat_embedded_init.connect(self.close_database) diff --git a/t/unit/fixups/test_django.py b/t/unit/fixups/test_django.py index 3f13970e033..07f94c6b813 100644 --- a/t/unit/fixups/test_django.py +++ b/t/unit/fixups/test_django.py @@ -263,10 +263,20 @@ def test_validate_models(self, patching, module): f.django_setup = Mock(name='django.setup') patching.modules('django.core.checks') from django.core.checks import run_checks + f.validate_models() f.django_setup.assert_called_with() run_checks.assert_called_with() + # test --skip-checks flag + f.django_setup.reset_mock() + run_checks.reset_mock() + + patching.setenv('CELERY_SKIP_CHECKS', True) + f.validate_models() + f.django_setup.assert_called_with() + run_checks.assert_not_called() + def test_django_setup(self, patching): patching('celery.fixups.django.symbol_by_name') patching('celery.fixups.django.import_module') From 258f12b70c9c63b56569debaf927f0fb1fc52d13 Mon Sep 17 00:00:00 2001 From: "pyup.io bot" Date: Fri, 4 Nov 2022 05:22:38 -0500 Subject: [PATCH 0234/1051] Scheduled weekly dependency update for week 44 (#7868) * Pin pytest to latest version 7.2.0 * Update pytest-subtests from 0.8.0 to 0.9.0 * Pin elasticsearch to latest version 8.4.3 * Update zstandard from 0.18.0 to 0.19.0 * Update pycurl from 7.43.0.5 to 7.45.1 * elasticsearch<8.0 * pycurl==7.43.0.5 Co-authored-by: Asif Saif Uddin --- requirements/extras/zstd.txt | 2 +- requirements/test.txt | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/extras/zstd.txt b/requirements/extras/zstd.txt index 73def0e68be..f702f7f0bda 100644 --- a/requirements/extras/zstd.txt +++ 
b/requirements/extras/zstd.txt @@ -1 +1 @@ -zstandard==0.18.0 +zstandard==0.19.0 diff --git a/requirements/test.txt b/requirements/test.txt index 1b4a57ab118..9e6362c6ab1 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,6 +1,6 @@ -pytest~=7.1.1 +pytest==7.2.0 pytest-celery==0.0.0 -pytest-subtests==0.8.0 +pytest-subtests==0.9.0 pytest-timeout~=2.1.0 pytest-click==1.1.0 pytest-order==1.0.1 From 20a9cb40a40c3e9b02b8015674c51ac7023c75d3 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 7 Nov 2022 10:31:40 +0200 Subject: [PATCH 0235/1051] Added two new unit tests: - test_callback_stamping - test_callback_stamping_on_replace --- t/unit/tasks/test_canvas.py | 140 +++++++++++++++++++++++++++++++++++- 1 file changed, 139 insertions(+), 1 deletion(-) diff --git a/t/unit/tasks/test_canvas.py b/t/unit/tasks/test_canvas.py index 08ed8a2f9a5..493ce04d50a 100644 --- a/t/unit/tasks/test_canvas.py +++ b/t/unit/tasks/test_canvas.py @@ -5,9 +5,11 @@ import pytest import pytest_subtests # noqa: F401 +from celery import Task from celery._state import _task_stack from celery.canvas import (GroupStampingVisitor, Signature, StampingVisitor, _chain, _maybe_group, chain, chord, chunks, group, maybe_signature, maybe_unroll_group, signature, xmap, xstarmap) +from celery.exceptions import Ignore from celery.result import AsyncResult, EagerResult, GroupResult SIG = Signature({ @@ -23,6 +25,11 @@ from collections.abc import Iterable +def return_True(*args, **kwargs): + # Task run functions can't be closures/lambdas, as they're pickled. + return True + + class test_maybe_unroll_group: def test_when_no_len_and_no_length_hint(self): @@ -173,7 +180,6 @@ def test_twice_stamping(self, subtests): with subtests.test("sig_1_res is stamped twice", stamped_headers=["stamp2", "stamp1"]): assert sorted(sig_1_res._get_task_meta()["stamped_headers"]) == sorted(["stamp", "groups"]) - @pytest.mark.usefixtures('depends_on_current_app') def test_manual_stamping(self): """ Test manual signature stamping. 
@@ -229,6 +235,138 @@ def on_signature(self, sig, **headers) -> dict: with subtests.test("sig_2 is stamped with custom visitor", header=["value"]): assert sig_2_res._get_task_meta()["header"] == ["value"] + @pytest.mark.usefixtures('depends_on_current_app') + def test_callback_stamping(self, subtests): + self.app.conf.task_always_eager = True + self.app.conf.task_store_eager_result = True + self.app.conf.result_extended = True + + class CustomStampingVisitor(StampingVisitor): + def on_signature(self, sig, **headers) -> dict: + return {'header': 'value'} + + def on_callback(self, callback, **header) -> dict: + return {'on_callback': True} + + def on_errback(self, errback, **header) -> dict: + return {'on_errback': True} + + sig_1 = self.add.s(0, 1) + sig_1_res = sig_1.freeze() + group_sig = group([self.add.s(3), self.add.s(4)]) + group_sig_res = group_sig.freeze() + chord_sig = chord([self.xsum.s(), self.xsum.s()], self.xsum.s()) + chord_sig_res = chord_sig.freeze() + sig_2 = self.add.s(2) + sig_2_res = sig_2.freeze() + chain_sig = chain( + sig_1, # --> 1 + group_sig, # --> [1+3, 1+4] --> [4, 5] + chord_sig, # --> [4+5, 4+5] --> [9, 9] --> 9+9 --> 18 + sig_2 # --> 18 + 2 --> 20 + ) + callback = signature('callback_task') + errback = signature('errback_task') + chain_sig.link(callback) + chain_sig.link_error(errback) + chain_sig.stamp(visitor=CustomStampingVisitor()) + chain_sig_res = chain_sig.apply_async() + chain_sig_res.get() + + with subtests.test("Confirm the chain was executed correctly", result=20): + # Before we run our assersions, let's confirm the base functionality of the chain is working + # as expected including the links stamping. + assert chain_sig_res.result == 20 + + with subtests.test("sig_1 is stamped with custom visitor", stamped_headers=["header", "groups"]): + assert sorted(sig_1_res._get_task_meta()["stamped_headers"]) == sorted(["header", "groups"]) + + with subtests.test("group_sig is stamped with custom visitor", stamped_headers=["header", "groups"]): + for result in group_sig_res.results: + assert sorted(result._get_task_meta()["stamped_headers"]) == sorted(["header", "groups"]) + + with subtests.test("chord_sig is stamped with custom visitor", stamped_headers=["header", "groups"]): + assert sorted(chord_sig_res._get_task_meta()["stamped_headers"]) == sorted(["header", "groups"]) + + with subtests.test("sig_2 is stamped with custom visitor", stamped_headers=["header", "groups"]): + assert sorted(sig_2_res._get_task_meta()["stamped_headers"]) == sorted(["header", "groups"]) + + with subtests.test("callback is stamped with custom visitor", + stamped_headers=["header", "groups, on_callback"]): + callback_link = chain_sig.options['link'][0] + headers = callback_link.options + stamped_headers = headers['stamped_headers'] + assert sorted(stamped_headers) == sorted(["header", "groups", "on_callback"]) + assert headers['on_callback'] is True + assert headers['header'] == 'value' + + with subtests.test("errback is stamped with custom visitor", + stamped_headers=["header", "groups, on_errback"]): + errback_link = chain_sig.options['link_error'][0] + headers = errback_link.options + stamped_headers = headers['stamped_headers'] + assert sorted(stamped_headers) == sorted(["header", "groups", "on_errback"]) + assert headers['on_errback'] is True + assert headers['header'] == 'value' + + @pytest.mark.usefixtures('depends_on_current_app') + def test_callback_stamping_on_replace(self, subtests): + class CustomStampingVisitor(StampingVisitor): + def on_signature(self, sig, 
**headers) -> dict: + return {'header': 'value'} + + def on_callback(self, callback, **header) -> dict: + return {'on_callback': True} + + def on_errback(self, errback, **header) -> dict: + return {'on_errback': True} + + class MyTask(Task): + def on_replace(self, sig): + sig.stamp(CustomStampingVisitor()) + return super().on_replace(sig) + + mytask = self.app.task(shared=False, base=MyTask)(return_True) + + sig1 = signature('sig1') + callback = signature('callback_task') + errback = signature('errback_task') + sig1.link(callback) + sig1.link_error(errback) + + with subtests.test("callback is not stamped with custom visitor yet"): + callback_link = sig1.options['link'][0] + headers = callback_link.options + assert 'on_callback' not in headers + assert 'header' not in headers + + with subtests.test("errback is not stamped with custom visitor yet"): + errback_link = sig1.options['link_error'][0] + headers = errback_link.options + assert 'on_errback' not in headers + assert 'header' not in headers + + with pytest.raises(Ignore): + mytask.replace(sig1) + + with subtests.test("callback is stamped with custom visitor", + stamped_headers=["header", "groups, on_callback"]): + callback_link = sig1.options['link'][0] + headers = callback_link.options + stamped_headers = headers['stamped_headers'] + assert sorted(stamped_headers) == sorted(["header", "groups", "on_callback"]) + assert headers['on_callback'] is True + assert headers['header'] == 'value' + + with subtests.test("errback is stamped with custom visitor", + stamped_headers=["header", "groups, on_errback"]): + errback_link = sig1.options['link_error'][0] + headers = errback_link.options + stamped_headers = headers['stamped_headers'] + assert sorted(stamped_headers) == sorted(["header", "groups", "on_errback"]) + assert headers['on_errback'] is True + assert headers['header'] == 'value' + def test_getitem_property_class(self): assert Signature.task assert Signature.args From e7f6ef9c40b432fe8e3c961453e8b15bd5aba22b Mon Sep 17 00:00:00 2001 From: Mathias Ertl Date: Sat, 5 Nov 2022 11:10:33 +0100 Subject: [PATCH 0236/1051] use inspect.signature to make extension Python 3.11 compatible --- celery/contrib/sphinx.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/celery/contrib/sphinx.py b/celery/contrib/sphinx.py index e9d7119094d..a3a06479ccf 100644 --- a/celery/contrib/sphinx.py +++ b/celery/contrib/sphinx.py @@ -30,7 +30,7 @@ Use ``.. autotask::`` to alternatively manually document a task. 
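
For example, a minimal ``docs/conf.py`` enabling this extension might look
like the following (an illustrative sketch, not a complete Sphinx
configuration):

.. code-block:: python

    # docs/conf.py (sketch)
    extensions = (
        'sphinx.ext.autodoc',
        'celery.contrib.sphinx',  # adds the Celery task documenter
    )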
""" -from inspect import formatargspec, getfullargspec +from inspect import signature from sphinx.domains.python import PyFunction from sphinx.ext.autodoc import FunctionDocumenter @@ -51,12 +51,10 @@ def can_document_member(cls, member, membername, isattr, parent): def format_args(self): wrapped = getattr(self.object, '__wrapped__', None) if wrapped is not None: - argspec = getfullargspec(wrapped) - if argspec[0] and argspec[0][0] in ('cls', 'self'): - del argspec[0][0] - fmt = formatargspec(*argspec) - fmt = fmt.replace('\\', '\\\\') - return fmt + sig = signature(wrapped) + if "self" in sig.parameters or "cls" in sig.parameters: + sig = sig.replace(parameters=list(sig.parameters.values())[1:]) + return str(sig) return '' def document_members(self, all_members=False): From 9565f494795c081bbe0d71286775eea63baf4b0f Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Tue, 8 Nov 2022 10:08:43 +0600 Subject: [PATCH 0237/1051] cryptography==38.0.3 --- requirements/extras/auth.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/auth.txt b/requirements/extras/auth.txt index bd312a3a72c..388c40441b4 100644 --- a/requirements/extras/auth.txt +++ b/requirements/extras/auth.txt @@ -1 +1 @@ -cryptography==38.0.2 +cryptography==38.0.3 From 3a7a82af9588629dad5807e0862bacbbd5d7a7f2 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 8 Nov 2022 17:41:01 +0200 Subject: [PATCH 0238/1051] Canvas.py doc enhancement (#7889) * Enhanced doc for canvas.maybe_unroll_group() * Enhanced doc for canvas._stamp_regen_task() * Enhanced doc for canvas._merge_dictionaries() --- celery/canvas.py | 22 +++++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/celery/canvas.py b/celery/canvas.py index 30cc58a83e4..3d09d1879c5 100644 --- a/celery/canvas.py +++ b/celery/canvas.py @@ -40,7 +40,9 @@ def maybe_unroll_group(group): - """Unroll group with only one member.""" + """Unroll group with only one member. + This allows treating a group of a single task as if it + was a single task without pre-knowledge.""" # Issue #1656 try: size = len(group.tasks) @@ -60,11 +62,29 @@ def task_name_from(task): def _stamp_regen_task(task, visitor, **headers): + """When stamping a sequence of tasks created by a generator, + we use this function to stamp each task in the generator + without exhausting it.""" + task.stamp(visitor=visitor, **headers) return task def _merge_dictionaries(d1, d2): + """Merge two dictionaries recursively into the first one. + + Example: + >>> d1 = {'dict': {'a': 1}, 'list': [1, 2], 'tuple': (1, 2)} + >>> d2 = {'dict': {'b': 2}, 'list': [3, 4], 'set': {'a', 'b'}} + >>> _merge_dictionaries(d1, d2) + + d1 will be modified to: { + 'dict': {'a': 1, 'b': 2}, + 'list': [1, 2, 3, 4], + 'tuple': (1, 2), + 'set': {'a', 'b'} + } + """ for key, value in d1.items(): if key in d2: if isinstance(value, dict): From eee997513092c26eff5a7678674a6d0f6a02c44c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sondre=20Lilleb=C3=B8=20Gundersen?= Date: Wed, 9 Nov 2022 00:26:37 +0100 Subject: [PATCH 0239/1051] Fix typo --- celery/beat.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/celery/beat.py b/celery/beat.py index 4c9486532e3..a3d13adafb3 100644 --- a/celery/beat.py +++ b/celery/beat.py @@ -46,7 +46,7 @@ class SchedulingError(Exception): class BeatLazyFunc: - """An lazy function declared in 'beat_schedule' and called before sending to worker. + """A lazy function declared in 'beat_schedule' and called before sending to worker. 
Example: From 6f1691b42d1df02c5657f700fe7b13e4ebde5332 Mon Sep 17 00:00:00 2001 From: hsk17 Date: Wed, 9 Nov 2022 05:25:15 +0100 Subject: [PATCH 0240/1051] fix typos in optional tests (#7876) * Update test_schedules.py * Update test_cache.py --- t/unit/app/test_schedules.py | 2 +- t/unit/backends/test_cache.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/t/unit/app/test_schedules.py b/t/unit/app/test_schedules.py index 8f49b5963b0..71b1dba71fb 100644 --- a/t/unit/app/test_schedules.py +++ b/t/unit/app/test_schedules.py @@ -26,7 +26,7 @@ def patch_crontab_nowfun(cls, retval): class test_solar: def setup(self): - pytest.importorskip('ephem0') + pytest.importorskip('ephem') self.s = solar('sunrise', 60, 30, app=self.app) def test_reduce(self): diff --git a/t/unit/backends/test_cache.py b/t/unit/backends/test_cache.py index 40ae4277331..79b5b69ed1c 100644 --- a/t/unit/backends/test_cache.py +++ b/t/unit/backends/test_cache.py @@ -143,7 +143,7 @@ def test_as_uri_multiple_servers(self): assert b.as_uri() == backend def test_regression_worker_startup_info(self): - pytest.importorskip('memcached') + pytest.importorskip('memcache') self.app.conf.result_backend = ( 'cache+memcached://127.0.0.1:11211;127.0.0.2:11211;127.0.0.3/' ) From eabd70172fa44e39f9e9d941ab4ca8a7176162fc Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 9 Nov 2022 14:47:09 +0200 Subject: [PATCH 0241/1051] Enhanced doc for canvas.Signature class (#7891) --- celery/canvas.py | 75 ++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 75 insertions(+) diff --git a/celery/canvas.py b/celery/canvas.py index 3d09d1879c5..6e5969fe0f7 100644 --- a/celery/canvas.py +++ b/celery/canvas.py @@ -311,6 +311,12 @@ class Signature(dict): @classmethod def register_type(cls, name=None): + """Register a new type of signature. + Used as a class decorator, for example: + >>> @Signature.register_type() + >>> class mysig(Signature): + >>> pass + """ def _inner(subclass): cls.TYPES[name or subclass.__name__] = subclass return subclass @@ -319,6 +325,10 @@ def _inner(subclass): @classmethod def from_dict(cls, d, app=None): + """Create a new signature from a dict. + Subclasses can override this method to customize how are + they created from a dict. + """ typ = d.get('subtask_type') if typ: target_cls = cls.TYPES[typ] @@ -413,6 +423,24 @@ def apply_async(self, args=None, kwargs=None, route_name=None, **options): return _apply(args, kwargs, **options) def _merge(self, args=None, kwargs=None, options=None, force=False): + """Merge partial args/kwargs/options with existing ones. + + If the signature is immutable and ``force`` is False, the existing + args/kwargs will be returned as-is and only the options will be merged. + + Stamped headers are considered immutable and will not be merged regardless. + + Arguments: + args (Tuple): Partial args to be prepended to the existing args. + kwargs (Dict): Partial kwargs to be merged with existing kwargs. + options (Dict): Partial options to be merged with existing options. + force (bool): If True, the args/kwargs will be merged even if the signature is + immutable. The stamped headers are not affected by this option and will not + be merged regardless. 
+ + Returns: + Tuple: (args, kwargs, options) + """ args = args if args else () kwargs = kwargs if kwargs else {} if options is not None: @@ -423,6 +451,7 @@ def _merge(self, args=None, kwargs=None, options=None, force=False): immutable_options = self._IMMUTABLE_OPTIONS if "stamped_headers" in self.options: immutable_options = self._IMMUTABLE_OPTIONS.union(set(self.options["stamped_headers"])) + # merge self.options with options without overriding stamped headers from self.options new_options = {**self.options, **{ k: v for k, v in options.items() if k not in immutable_options or k not in self.options @@ -471,6 +500,18 @@ def freeze(self, _id=None, group_id=None, chord=None, twice after freezing it as that'll result in two task messages using the same task id. + The arguments are used to override the signature's headers during + freezing. + + Arguments: + _id (str): Task id to use if it didn't already have one. + New UUID is generated if not provided. + group_id (str): Group id to use if it didn't already have one. + chord (Signature): Chord body when freezing a chord header. + root_id (str): Root id to use. + parent_id (str): Parent id to use. + group_index (int): Group index to use. + Returns: ~@AsyncResult: promise of future evaluation. """ @@ -594,18 +635,34 @@ def stamp_links(self, visitor, **headers): link.stamp(visitor=visitor, **headers) def _with_list_option(self, key): + """Gets the value at the given self.options[key] as a list. + + If the value is not a list, it will be converted to one and saved in self.options. + If the key does not exist, an empty list will be set and returned instead. + + Arguments: + key (str): The key to get the value for. + + Returns: + List: The value at the given key as a list or an empty list if the key does not exist. + """ items = self.options.setdefault(key, []) if not isinstance(items, MutableSequence): items = self.options[key] = [items] return items def append_to_list_option(self, key, value): + """Appends the given value to the list at the given key in self.options.""" items = self._with_list_option(key) if value not in items: items.append(value) return value def extend_list_option(self, key, value): + """Extends the list at the given key in self.options with the given value. + + If the value is not a list, it will be converted to one. + """ items = self._with_list_option(key) items.extend(maybe_list(value)) @@ -652,6 +709,14 @@ def flatten_links(self): ))) def __or__(self, other): + """Chaining operator. + + Example: + >>> add.s(2, 2) | add.s(4) | add.s(8) + + Returns: + chain: Constructs a :class:`~celery.canvas.chain` of the given signatures. + """ if isinstance(other, _chain): # task | chain -> chain return _chain(seq_concat_seq( @@ -685,6 +750,16 @@ def election(self): return type.AsyncResult(tid) def reprcall(self, *args, **kwargs): + """Return a string representation of the signature. + + Merges the given arguments with the signature's arguments + only for the purpose of generating the string representation. + The signature itself is not modified. 
+ + Example: + >>> add.s(2, 2).reprcall() + 'add(2, 2)' + """ args, kwargs, _ = self._merge(args, kwargs, {}, force=True) return reprcall(self['task'], args, kwargs) From dc7cdc2576d2015d4c72039a43a6aa6aebaf69c6 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 10 Nov 2022 08:11:57 +0200 Subject: [PATCH 0242/1051] Fix revoke by headers tests stability (#7892) * Fix for test_revoked_by_headers_simple_canvas() * Fix for test_revoked_by_headers_complex_canvas() --- t/integration/test_tasks.py | 26 +++++++++++++++++++++----- 1 file changed, 21 insertions(+), 5 deletions(-) diff --git a/t/integration/test_tasks.py b/t/integration/test_tasks.py index f681da01b61..5eea4d88e9e 100644 --- a/t/integration/test_tasks.py +++ b/t/integration/test_tasks.py @@ -200,6 +200,13 @@ def test_revoked(self, manager): def test_revoked_by_headers_simple_canvas(self, manager): """Testing revoking of task using a stamped header""" + # Try to purge the queue before we start + # to attempt to avoid interference from other tests + while True: + count = manager.app.control.purge() + if count == 0: + break + target_monitoring_id = uuid4().hex class MonitoringIdStampingVisitor(StampingVisitor): @@ -227,11 +234,13 @@ def on_signature(self, sig, **headers) -> dict: assert result.successful() is True worker_state.revoked_headers.clear() - # This test leaves the environment dirty, - # so we let it run last in the suite to avoid - # affecting other tests until we can fix it. - @pytest.mark.order("last") - @flaky + # Try to purge the queue after we're done + # to attempt to avoid interference to other tests + while True: + count = manager.app.control.purge() + if count == 0: + break + def test_revoked_by_headers_complex_canvas(self, manager, subtests): """Testing revoking of task using a stamped header""" try: @@ -285,6 +294,13 @@ def on_signature(self, sig, **headers) -> dict: assert result.successful() is False worker_state.revoked_headers.clear() + # Try to purge the queue after we're done + # to attempt to avoid interference to other tests + while True: + count = manager.app.control.purge() + if count == 0: + break + @flaky def test_wrong_arguments(self, manager): """Tests that proper exceptions are raised when task is called with wrong arguments.""" From 41e79a9ed45bd80d791c116408c64e833a6c57d0 Mon Sep 17 00:00:00 2001 From: Kaustav Banerjee Date: Thu, 10 Nov 2022 18:20:59 +0530 Subject: [PATCH 0243/1051] feat: add global keyprefix for backend result keys (#7620) * feat: add global keyprefix for result keys * docs: added documentation for global keyprefix for result backend --- CONTRIBUTORS.txt | 1 + celery/backends/base.py | 16 +++++++++++++++- .../backends-and-brokers/redis.rst | 18 ++++++++++++++++++ t/unit/backends/test_base.py | 19 ++++++++++++++++++- 4 files changed, 52 insertions(+), 2 deletions(-) diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 4b99f190dbe..fe420b14d67 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -290,3 +290,4 @@ Gabor Boros, 2021/11/09 Tizian Seehaus, 2022/02/09 Oleh Romanovskyi, 2022/06/09 JoonHwan Kim, 2022/08/01 +Kaustav Banerjee, 2022/11/10 diff --git a/celery/backends/base.py b/celery/backends/base.py index e851c8189f6..22710cb3c56 100644 --- a/celery/backends/base.py +++ b/celery/backends/base.py @@ -817,11 +817,25 @@ class BaseKeyValueStoreBackend(Backend): def __init__(self, *args, **kwargs): if hasattr(self.key_t, '__func__'): # pragma: no cover self.key_t = self.key_t.__func__ # remove binding - self._encode_prefixes() super().__init__(*args, **kwargs) + 
self._add_global_keyprefix() + self._encode_prefixes() if self.implements_incr: self.apply_chord = self._apply_chord_incr + def _add_global_keyprefix(self): + """ + This method prepends the global keyprefix to the existing keyprefixes. + + This method checks if a global keyprefix is configured in `result_backend_transport_options` using the + `global_keyprefix` key. If so, then it is prepended to the task, group and chord key prefixes. + """ + global_keyprefix = self.app.conf.get('result_backend_transport_options', {}).get("global_keyprefix", None) + if global_keyprefix: + self.task_keyprefix = f"{global_keyprefix}_{self.task_keyprefix}" + self.group_keyprefix = f"{global_keyprefix}_{self.group_keyprefix}" + self.chord_keyprefix = f"{global_keyprefix}_{self.chord_keyprefix}" + def _encode_prefixes(self): self.task_keyprefix = self.key_t(self.task_keyprefix) self.group_keyprefix = self.key_t(self.group_keyprefix) diff --git a/docs/getting-started/backends-and-brokers/redis.rst b/docs/getting-started/backends-and-brokers/redis.rst index 9d42397de57..1c583f0bb27 100644 --- a/docs/getting-started/backends-and-brokers/redis.rst +++ b/docs/getting-started/backends-and-brokers/redis.rst @@ -100,6 +100,24 @@ If you are using Sentinel, you should specify the master_name using the :setting app.conf.result_backend_transport_options = {'master_name': "mymaster"} +.. _redis-result-backend-global-keyprefix: + +Global keyprefix +^^^^^^^^^^^^^^^^ + +The global key prefix will be prepended to all keys used for the result backend, +which can be useful when a redis database is shared by different users. +By default, no prefix is prepended. + +To configure the global keyprefix for the Redis result backend, use the ``global_keyprefix`` key under :setting:`result_backend_transport_options`: + + +.. code-block:: python + + app.conf.result_backend_transport_options = { + 'global_keyprefix': 'my_prefix_' + } + .. 
_redis-result-backend-timeout: Connection timeouts diff --git a/t/unit/backends/test_base.py b/t/unit/backends/test_base.py index b9084522d25..34205caa729 100644 --- a/t/unit/backends/test_base.py +++ b/t/unit/backends/test_base.py @@ -1,10 +1,11 @@ +import copy import re from contextlib import contextmanager from unittest.mock import ANY, MagicMock, Mock, call, patch, sentinel import pytest from kombu.serialization import prepare_accept_content -from kombu.utils.encoding import ensure_bytes +from kombu.utils.encoding import bytes_to_str, ensure_bytes import celery from celery import chord, group, signature, states, uuid @@ -722,6 +723,22 @@ def test_strip_prefix(self): assert self.b._strip_prefix(x) == 'x1b34' assert self.b._strip_prefix('x1b34') == 'x1b34' + def test_global_keyprefix(self): + global_keyprefix = "test_global_keyprefix_" + app = copy.deepcopy(self.app) + app.conf.get('result_backend_transport_options', {}).update({"global_keyprefix": global_keyprefix}) + b = KVBackend(app=app) + tid = uuid() + assert bytes_to_str(b.get_key_for_task(tid)) == f"{global_keyprefix}_celery-task-meta-{tid}" + assert bytes_to_str(b.get_key_for_group(tid)) == f"{global_keyprefix}_celery-taskset-meta-{tid}" + assert bytes_to_str(b.get_key_for_chord(tid)) == f"{global_keyprefix}_chord-unlock-{tid}" + + def test_global_keyprefix_missing(self): + tid = uuid() + assert bytes_to_str(self.b.get_key_for_task(tid)) == f"celery-task-meta-{tid}" + assert bytes_to_str(self.b.get_key_for_group(tid)) == f"celery-taskset-meta-{tid}" + assert bytes_to_str(self.b.get_key_for_chord(tid)) == f"chord-unlock-{tid}" + def test_get_many(self): for is_dict in True, False: self.b.mget_returns_dict = is_dict From 1bdd5e4fac9279b46ae2d24ec1384ec8a20d1528 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 10 Nov 2022 18:02:12 +0200 Subject: [PATCH 0244/1051] Canvas.py doc enhancement (#7897) * Enhanced doc for canvas._chain.unchain_tasks() * Enhanced doc for canvas._chain.prepare_steps() * Enhanced doc for canvas._chain.run() * Update celery/canvas.py Co-authored-by: Asif Saif Uddin * Update celery/canvas.py Co-authored-by: Asif Saif Uddin * Update celery/canvas.py Co-authored-by: Asif Saif Uddin Co-authored-by: Asif Saif Uddin --- celery/canvas.py | 48 ++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 48 insertions(+) diff --git a/celery/canvas.py b/celery/canvas.py index 6e5969fe0f7..7bf904fca2c 100644 --- a/celery/canvas.py +++ b/celery/canvas.py @@ -954,6 +954,13 @@ def clone(self, *args, **kwargs): return signature def unchain_tasks(self): + """Return a list of tasks in the chain. + + The tasks list would be cloned from the chain's tasks, + and all of the tasks would be linked to the same error callback + as the chain itself, to ensure that the correct error callback is called + if any of the (cloned) tasks of the chain fail. + """ # Clone chain's tasks assigning signatures from link_error # to each task tasks = [t.clone() for t in self.tasks] @@ -978,6 +985,12 @@ def run(self, args=None, kwargs=None, group_id=None, chord=None, task_id=None, link=None, link_error=None, publisher=None, producer=None, root_id=None, parent_id=None, app=None, group_index=None, **options): + """Executes the chain. + + Responsible for executing the chain in the correct order. + In a case of a chain of a single task, the task is executed directly + and the result is returned for that task specifically. + """ # pylint: disable=redefined-outer-name # XXX chord is also a class in outer scope. 
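        # Merge any call-time arguments with the signature's own args below;
        # immutable chains ignore call-time args and keep their bound args.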
From 1bdd5e4fac9279b46ae2d24ec1384ec8a20d1528 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 10 Nov 2022 18:02:12 +0200 Subject: [PATCH 0244/1051] Canvas.py doc enhancement (#7897) * Enhanced doc for canvas._chain.unchain_tasks() * Enhanced doc for canvas._chain.prepare_steps() * Enhanced doc for canvas._chain.run() * Update celery/canvas.py Co-authored-by: Asif Saif Uddin * Update celery/canvas.py Co-authored-by: Asif Saif Uddin * Update celery/canvas.py Co-authored-by: Asif Saif Uddin Co-authored-by: Asif Saif Uddin --- celery/canvas.py | 48 ++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 48 insertions(+) diff --git a/celery/canvas.py b/celery/canvas.py index 6e5969fe0f7..7bf904fca2c 100644 --- a/celery/canvas.py +++ b/celery/canvas.py @@ -954,6 +954,13 @@ def clone(self, *args, **kwargs): return signature def unchain_tasks(self): + """Return a list of tasks in the chain. + + The tasks are cloned from the chain's tasks, + and each of them is linked to the same error callback + as the chain itself, to ensure that the correct error callback is called + if any of the (cloned) tasks of the chain fail. + """ # Clone chain's tasks assigning signatures from link_error # to each task tasks = [t.clone() for t in self.tasks] @@ -978,6 +985,12 @@ def run(self, args=None, kwargs=None, group_id=None, chord=None, task_id=None, link=None, link_error=None, publisher=None, producer=None, root_id=None, parent_id=None, app=None, group_index=None, **options): + """Execute the chain. + + Responsible for executing the chain in the correct order. + In the case of a chain with a single task, the task is executed directly + and its result is returned. + """ # pylint: disable=redefined-outer-name # XXX chord is also a class in outer scope. args = args if args else () @@ -989,6 +1002,7 @@ def run(self, args=None, kwargs=None, group_id=None, chord=None, args = (tuple(args) + tuple(self.args) if args and not self.immutable else self.args) + # Unpack nested chains/groups/chords tasks, results_from_prepare = self.prepare_steps( args, kwargs, self.tasks, root_id, parent_id, link_error, app, task_id, group_id, chord, group_index=group_index, @@ -999,6 +1013,8 @@ def run(self, args=None, kwargs=None, group_id=None, chord=None, visitor = GroupStampingVisitor(groups=groups, stamped_headers=stamped_headers) self.stamp(visitor=visitor) + # For a chain with a single task, execute the task directly and return its result + # For a chain with multiple tasks, execute all of the tasks and return the AsyncResult for the chain if results_from_prepare: if link: tasks[0].extend_list_option('link', link) @@ -1046,6 +1062,38 @@ def prepare_steps(self, args, kwargs, tasks, last_task_id=None, group_id=None, chord_body=None, clone=True, from_dict=Signature.from_dict, group_index=None): + """Prepare the chain for execution. + + To execute a chain, we first need to unpack it correctly. + During the unpacking, we might encounter other chains, groups, or chords + which we need to unpack as well. + + For example: + chain(signature1, chain(signature2, signature3)) --> Flattens to chain(signature1, signature2, signature3) + chain(group(signature1, signature2), signature3) --> Upgrades to chord([signature1, signature2], signature3) + + The responsibility of this method is to ensure that the chain is + correctly unpacked, and that the correct callbacks are set up along the way. + + Arguments: + args (Tuple): Partial args to be prepended to the existing args. + kwargs (Dict): Partial kwargs to be merged with existing kwargs. + tasks (List[Signature]): The tasks of the chain. + root_id (str): The id of the root task. + parent_id (str): The id of the parent task. + link_error (Union[List[Signature], Signature]): The error callback that + will be set for all tasks in the chain. + app (Celery): The Celery app instance. + last_task_id (str): The id of the last task in the chain. + group_id (str): The id of the group that the chain is a part of. + chord_body (Signature): The body of the chord, used to synchronize the chain's + last task with the chord's body when they are used together. + clone (bool): Whether to clone the chain's tasks before modifying them. + from_dict (Callable): A function that takes a dict and returns a Signature. + + Returns: + Tuple[List[Signature], List[AsyncResult]]: The frozen tasks of the chain, and the async results. + """ app = app or self.app # use chain message field for protocol 2 and later. # this avoids pickle blowing the stack on the recursion
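As a sketch of the unpacking rules this docstring describes (the task names ``t.a``, ``t.b`` and ``t.c`` are placeholders):

.. code-block:: python

    from celery import chain, group, signature

    s1, s2, s3 = signature('t.a'), signature('t.b'), signature('t.c')

    # A chain nested inside a chain is flattened into a single chain.
    chain(s1, chain(s2, s3))

    # A group followed by another task is upgraded to a chord when the
    # chain runs: the group becomes the header, the last task the body.
    chain(group(s1, s2), s3)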
From d96bf9bcbc11a19bd3e98da3e4ffbc17b6d5d17c Mon Sep 17 00:00:00 2001 From: chncaption <101684156+chncaption@users.noreply.github.com> Date: Fri, 11 Nov 2022 10:17:29 +0800 Subject: [PATCH 0245/1051] update sqlalchemy 1.0.14 to 1.2.18 --- examples/django/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/django/requirements.txt b/examples/django/requirements.txt index 4ba37fb5b8a..ef6d5a6de00 100644 --- a/examples/django/requirements.txt +++ b/examples/django/requirements.txt @@ -1,3 +1,3 @@ django>=2.2.1 -sqlalchemy>=1.0.14 +sqlalchemy>=1.2.18 celery>=5.0.5 From 4e2280b12958edd0acdcd40e4bb845bbc3070791 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 14 Nov 2022 13:41:29 +0200 Subject: [PATCH 0246/1051] Canvas.py doc enhancement (#7902) * Added reference in comment to issue #6973 regarding slowdowns when using a group with a generator of many tasks * Added TODO comment to group.skew() to consider removing it * Enhanced doc for canvas.group.from_dict() * Enhanced doc for canvas.group._prepared() * Enhanced doc for canvas.group._apply_tasks() * Enhanced doc for canvas.group._freeze_gid() * Enhanced doc for canvas.group._freeze_unroll() * Enhanced doc for canvas.group._unroll_tasks() * Enhanced doc for canvas.group._freeze_tasks() * Enhanced doc for canvas.group._freeze_group_tasks() * Update celery/canvas.py Co-authored-by: Omer Katz * Added example doc for group.from_dict() Co-authored-by: Omer Katz --- celery/canvas.py | 137 ++++++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 130 insertions(+), 7 deletions(-) diff --git a/celery/canvas.py b/celery/canvas.py index 7bf904fca2c..ce26dcc1cb6 100644 --- a/celery/canvas.py +++ b/celery/canvas.py @@ -1448,6 +1448,50 @@ class group(Signature): @classmethod def from_dict(cls, d, app=None): + """Create a group signature from a dictionary that represents a group. + + Example: + >>> group_dict = { + "task": "celery.group", + "args": [], + "kwargs": { + "tasks": [ + { + "task": "add", + "args": [ + 1, + 2 + ], + "kwargs": {}, + "options": {}, + "subtask_type": None, + "immutable": False + }, + { + "task": "add", + "args": [ + 3, + 4 + ], + "kwargs": {}, + "options": {}, + "subtask_type": None, + "immutable": False + } + ] + }, + "options": {}, + "subtask_type": "group", + "immutable": False + } + >>> group_sig = group.from_dict(group_dict) + + Iterates over the given tasks in the dictionary and converts them to signatures. + Tasks need to be defined in d['kwargs']['tasks'] as a sequence + of tasks. + + The tasks themselves can be dictionaries or signatures (or both). + """ # We need to mutate the `kwargs` element in place to avoid confusing # `freeze()` implementations which end up here and expect to be able to # access elements from that dictionary later and refer to objects @@ -1466,6 +1510,8 @@ def __init__(self, *tasks, **options): if isinstance(tasks, abstract.CallableSignature): tasks = [tasks.clone()] if not isinstance(tasks, _regen): + # May potentially cause slowdowns when using a + # generator of many tasks - Issue #6973 tasks = regen(tasks) super().__init__('celery.group', (), {'tasks': tasks}, **options ) @@ -1479,6 +1525,7 @@ def __or__(self, other): return chord(self, body=other, app=self._app) def skew(self, start=1.0, stop=None, step=1.0): + # TODO: Not sure if this is still used anywhere (besides its own tests). Consider removing.
it = fxrange(start, stop, step, repeatlast=True) for task in self.tasks: task.set(countdown=next(it)) @@ -1591,6 +1638,32 @@ def _prepared(self, tasks, partial_args, group_id, root_id, app, CallableSignature=abstract.CallableSignature, from_dict=Signature.from_dict, isinstance=isinstance, tuple=tuple): + """Recursively unroll the group into a generator of its tasks. + + This is used by :meth:`apply_async` and :meth:`apply` to + unroll the group into a list of tasks that can be evaluated. + + Note: + This does not change the group itself, it only returns + a generator of the tasks that the group would evaluate to. + + Arguments: + tasks (list): List of tasks in the group (may contain nested groups). + partial_args (list): List of arguments to be prepended to + the arguments of each task. + group_id (str): The group id of the group. + root_id (str): The root id of the group. + app (Celery): The Celery app instance. + CallableSignature (class): The signature class of the group's tasks. + from_dict (fun): Function to create a signature from a dict. + isinstance (fun): Function to check if an object is an instance + of a class. + tuple (class): A tuple-like class. + + Returns: + generator: A generator for the unrolled group tasks. + The generator yields tuples of the form ``(task, AsyncResult, group_id)``. + """ for task in tasks: if isinstance(task, CallableSignature): # local sigs are always of type Signature, and we @@ -1613,6 +1686,25 @@ def _prepared(self, tasks, partial_args, group_id, root_id, app, def _apply_tasks(self, tasks, producer=None, app=None, p=None, add_to_parent=None, chord=None, args=None, kwargs=None, **options): + """Run all the tasks in the group. + + This is used by :meth:`apply_async` to run all the tasks in the group + and return a generator of their results. + + Arguments: + tasks (list): List of tasks in the group. + producer (Producer): The producer to use to publish the tasks. + app (Celery): The Celery app instance. + p (barrier): Barrier object to synchronize the tasks results. + args (list): List of arguments to be prepended to + the arguments of each task. + kwargs (dict): Dict of keyword arguments to be merged with + the keyword arguments of each task. + **options (dict): Options to be merged with the options of each task. + + Returns: + generator: A generator for the AsyncResult of the tasks in the group. + """ # pylint: disable=redefined-outer-name # XXX chord is also a class in outer scope. app = app or self.app @@ -1656,6 +1748,7 @@ def _apply_tasks(self, tasks, producer=None, app=None, p=None, yield res # <-- r.parent, etc set in the frozen result. def _freeze_gid(self, options): + """Freeze the group id by the existing task_id or a new UUID.""" # remove task_id and use that as the group_id, # if we don't remove it then every task will have the same id... options = {**self.options, **{ @@ -1668,6 +1761,15 @@ def _freeze_gid(self, options): def _freeze_group_tasks(self, _id=None, group_id=None, chord=None, root_id=None, parent_id=None, group_index=None): + """Freeze the tasks in the group. + + Note: + If the group tasks are created from a generator, the tasks generator would + not be exhausted, and the tasks would be frozen lazily. + + Returns: + tuple: A tuple of the group id, and the AsyncResult of each of the group tasks. + """ # pylint: disable=redefined-outer-name # XXX chord is also a class in outer scope. 
opts = self.options @@ -1684,15 +1786,16 @@ def _freeze_group_tasks(self, _id=None, group_id=None, chord=None, root_id = opts.setdefault('root_id', root_id) parent_id = opts.setdefault('parent_id', parent_id) if isinstance(self.tasks, _regen): - # We are draining from a generator here. - # tasks1, tasks2 are each a clone of self.tasks + # When the group tasks are a generator, we need to make sure we don't + # exhaust it during the freeze process. We use two generators to do this. + # One generator will be used to freeze the tasks to get their AsyncResult. + # The second generator will be used to replace the tasks in the group with an unexhausted state. + + # Create two new generators from the original generator of the group tasks (cloning the tasks). tasks1, tasks2 = itertools.tee(self._unroll_tasks(self.tasks)) - # freeze each task in tasks1, results now holds AsyncResult for each task + # Use the first generator to freeze the group tasks to acquire the AsyncResult for each task. results = regen(self._freeze_tasks(tasks1, group_id, chord, root_id, parent_id)) - # TODO figure out why this makes sense - - # we freeze all tasks in the clone tasks1, and then zip the results - # with the IDs of tasks in the second clone, tasks2. and then, we build - # a generator that takes only the task IDs from tasks2. + # Use the second generator to replace the exhausted generator of the group tasks. self.tasks = regen(tasks2) else: new_tasks = [] @@ -1717,6 +1820,7 @@ def freeze(self, _id=None, group_id=None, chord=None, _freeze = freeze def _freeze_tasks(self, tasks, group_id, chord, root_id, parent_id): + """Creates a generator for the AsyncResult of each task in the tasks argument.""" yield from (task.freeze(group_id=group_id, chord=chord, root_id=root_id, @@ -1725,10 +1829,29 @@ def _freeze_tasks(self, tasks, group_id, chord, root_id, parent_id): for group_index, task in enumerate(tasks)) def _unroll_tasks(self, tasks): + """Creates a generator for the cloned tasks of the tasks argument.""" # should be refactored to: (maybe_signature(task, app=self._app, clone=True) for task in tasks) yield from (maybe_signature(task, app=self._app).clone() for task in tasks) def _freeze_unroll(self, new_tasks, group_id, chord, root_id, parent_id): + """Generator for the frozen flattened group tasks. + + Creates a flattened list of the tasks in the group, and freezes + each task in the group. Nested groups will be recursively flattened. + + Exhausting the generator will create a new list of the flattened + tasks in the group and will return it in the new_tasks argument. + + Arguments: + new_tasks (list): The list to append the flattened tasks to. + group_id (str): The group_id to use for the tasks. + chord (Chord): The chord to use for the tasks. + root_id (str): The root_id to use for the tasks. + parent_id (str): The parent_id to use for the tasks. + + Yields: + AsyncResult: The frozen task. + """ # pylint: disable=redefined-outer-name # XXX chord is also a class in outer scope. 
stack = deque(self.tasks) From a6b16c5f794d9d00188cdc8ae55bc6fee090c155 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 14 Nov 2022 17:02:04 +0000 Subject: [PATCH 0247/1051] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/asottile/pyupgrade: v3.2.0 → v3.2.2](https://github.com/asottile/pyupgrade/compare/v3.2.0...v3.2.2) - [github.com/pre-commit/mirrors-mypy: v0.982 → v0.990](https://github.com/pre-commit/mirrors-mypy/compare/v0.982...v0.990) --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f91e4309713..81428931931 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/asottile/pyupgrade - rev: v3.2.0 + rev: v3.2.2 hooks: - id: pyupgrade args: ["--py37-plus"] @@ -29,7 +29,7 @@ repos: - id: isort - repo: https://github.com/pre-commit/mirrors-mypy - rev: v0.982 + rev: v0.990 hooks: - id: mypy pass_filenames: false From b5bc40f04aad9cbff5e0c605103009cf9cb0e282 Mon Sep 17 00:00:00 2001 From: ShaheedHaque Date: Tue, 15 Nov 2022 06:35:44 +0000 Subject: [PATCH 0248/1051] Fix test warnings (#7906) * Ensure all implementations of BasePool._get_info() use the super() results as a base. * Have BasePool._get_info() report the implementation class of the pool using the standard Celery class naming convention. * Allow for an out-of-tree worker pool implementation. This is used as follows: - Set the environment variable CELERY_CUSTOM_WORKER_POOL to the name of an implementation of :class:`celery.concurrency.base.BasePool` in the standard Celery format of "package:class". - Select this pool using '--pool custom'. * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * Fixes for missed test breakage. * Silence test code deprecation warnings (warning count reduced from 1674 to 45). The deprecations were of the form: ======= t/unit/worker/test_worker.py::test_WorkController::test_Pool_create /main/srhaque/kdedev/celery/.eggs/pytest-7.2.0-py3.10.egg/_pytest/fixtures.py:900: PytestRemovedIn8Warning: Support for nose tests is deprecated and will be removed in a future release. t/unit/worker/test_worker.py::test_WorkController::test_Pool_create is using nose-specific method: `setup(self)` To remove this warning, rename it to `setup_method(self)` See docs: https://docs.pytest.org/en/stable/deprecations.html#support-for-tests-written-for-nose fixture_result = next(generator) t/unit/worker/test_worker.py::test_WorkController::test_Pool_create /main/srhaque/kdedev/celery/.eggs/pytest-7.2.0-py3.10.egg/_pytest/fixtures.py:916: PytestRemovedIn8Warning: Support for nose tests is deprecated and will be removed in a future release. 
t/unit/worker/test_worker.py::test_WorkController::test_Pool_create is using nose-specific method: `teardown(self)` To remove this warning, rename it to `teardown_method(self)` See docs: https://docs.pytest.org/en/stable/deprecations.html#support-for-tests-written-for-nose next(it) ======= Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- celery/concurrency/__init__.py | 15 +++++++++++++++ celery/concurrency/base.py | 10 +++++++++- celery/concurrency/prefork.py | 6 ++++-- celery/concurrency/solo.py | 6 ++++-- celery/concurrency/thread.py | 6 ++++-- t/unit/app/test_amqp.py | 2 +- t/unit/app/test_annotations.py | 2 +- t/unit/app/test_app.py | 2 +- t/unit/app/test_builtins.py | 16 ++++++++-------- t/unit/app/test_control.py | 6 +++--- t/unit/app/test_defaults.py | 4 ++-- t/unit/app/test_loaders.py | 4 ++-- t/unit/app/test_log.py | 6 +++--- t/unit/app/test_registry.py | 2 +- t/unit/app/test_routes.py | 2 +- t/unit/app/test_schedules.py | 4 ++-- t/unit/apps/test_multi.py | 4 ++-- t/unit/backends/test_arangodb.py | 2 +- t/unit/backends/test_azureblockblob.py | 4 ++-- t/unit/backends/test_base.py | 12 ++++++------ t/unit/backends/test_cache.py | 4 ++-- t/unit/backends/test_cassandra.py | 2 +- t/unit/backends/test_consul.py | 2 +- t/unit/backends/test_cosmosdbsql.py | 2 +- t/unit/backends/test_couchbase.py | 2 +- t/unit/backends/test_couchdb.py | 2 +- t/unit/backends/test_database.py | 4 ++-- t/unit/backends/test_dynamodb.py | 2 +- t/unit/backends/test_elasticsearch.py | 2 +- t/unit/backends/test_filesystem.py | 2 +- t/unit/backends/test_mongodb.py | 2 +- t/unit/backends/test_redis.py | 4 ++-- t/unit/backends/test_rpc.py | 2 +- t/unit/concurrency/test_concurrency.py | 3 +++ t/unit/concurrency/test_eventlet.py | 5 +++-- t/unit/concurrency/test_gevent.py | 4 ++-- t/unit/concurrency/test_pool.py | 2 +- t/unit/concurrency/test_prefork.py | 4 ++-- t/unit/contrib/test_abortable.py | 2 +- t/unit/contrib/test_worker.py | 2 +- t/unit/events/test_cursesmon.py | 2 +- t/unit/events/test_snapshot.py | 4 ++-- t/unit/security/case.py | 2 +- t/unit/security/test_security.py | 2 +- t/unit/tasks/test_canvas.py | 2 +- t/unit/tasks/test_chord.py | 4 ++-- t/unit/tasks/test_result.py | 10 +++++----- t/unit/tasks/test_tasks.py | 2 +- t/unit/tasks/test_trace.py | 2 +- t/unit/utils/test_collections.py | 2 +- t/unit/worker/test_autoscale.py | 2 +- t/unit/worker/test_bootsteps.py | 4 ++-- t/unit/worker/test_components.py | 2 +- t/unit/worker/test_consumer.py | 2 +- t/unit/worker/test_control.py | 2 +- t/unit/worker/test_loops.py | 4 ++-- t/unit/worker/test_request.py | 6 +++--- t/unit/worker/test_state.py | 2 +- t/unit/worker/test_strategy.py | 6 +++--- t/unit/worker/test_worker.py | 8 ++++---- 60 files changed, 136 insertions(+), 103 deletions(-) diff --git a/celery/concurrency/__init__.py b/celery/concurrency/__init__.py index a326c79aff2..5fd0d9cad42 100644 --- a/celery/concurrency/__init__.py +++ b/celery/concurrency/__init__.py @@ -1,4 +1,5 @@ """Pool implementation abstract factory, and alias definitions.""" +import os # Import from kombu directly as it's used # early in the import stage, where celery.utils loads @@ -21,6 +22,20 @@ pass else: ALIASES['threads'] = 'celery.concurrency.thread:TaskPool' +# +# Allow for an out-of-tree worker pool implementation. This is used as follows: +# +# - Set the environment variable CELERY_CUSTOM_WORKER_POOL to the name of +# an implementation of :class:`celery.concurrency.base.BasePool` in the +# standard Celery format of "package:class". 
+# - Select this pool using '--pool custom'. +# +try: + custom = os.environ.get('CELERY_CUSTOM_WORKER_POOL') +except KeyError: + pass +else: + ALIASES['custom'] = custom def get_implementation(cls): diff --git a/celery/concurrency/base.py b/celery/concurrency/base.py index 0b4db3fbf35..1ce9a751ea2 100644 --- a/celery/concurrency/base.py +++ b/celery/concurrency/base.py @@ -3,6 +3,7 @@ import os import sys import time +from typing import Any, Dict from billiard.einfo import ExceptionInfo from billiard.exceptions import WorkerLostError @@ -154,8 +155,15 @@ def apply_async(self, target, args=None, kwargs=None, **options): callbacks_propagate=self.callbacks_propagate, **options) - def _get_info(self): + def _get_info(self) -> Dict[str, Any]: + """ + Return configuration and statistics information. Subclasses should + augment the data as required. + + :return: The returned value must be JSON-friendly. + """ return { + 'implementation': self.__class__.__module__ + ':' + self.__class__.__name__, 'max-concurrency': self.limit, } diff --git a/celery/concurrency/prefork.py b/celery/concurrency/prefork.py index 40772ebae1a..b163328d0b3 100644 --- a/celery/concurrency/prefork.py +++ b/celery/concurrency/prefork.py @@ -155,7 +155,8 @@ def on_close(self): def _get_info(self): write_stats = getattr(self._pool, 'human_write_stats', None) - return { + info = super()._get_info() + info.update({ 'max-concurrency': self.limit, 'processes': [p.pid for p in self._pool._pool], 'max-tasks-per-child': self._pool._maxtasksperchild or 'N/A', @@ -163,7 +164,8 @@ def _get_info(self): 'timeouts': (self._pool.soft_timeout or 0, self._pool.timeout or 0), 'writes': write_stats() if write_stats is not None else 'N/A', - } + }) + return info @property def num_processes(self): diff --git a/celery/concurrency/solo.py b/celery/concurrency/solo.py index ea6e274a3ba..e7e9c7f3ba4 100644 --- a/celery/concurrency/solo.py +++ b/celery/concurrency/solo.py @@ -20,10 +20,12 @@ def __init__(self, *args, **kwargs): signals.worker_process_init.send(sender=None) def _get_info(self): - return { + info = super()._get_info() + info.update({ 'max-concurrency': 1, 'processes': [os.getpid()], 'max-tasks-per-child': None, 'put-guarded-by-semaphore': True, 'timeouts': (), - } + }) + return info diff --git a/celery/concurrency/thread.py b/celery/concurrency/thread.py index 120374bcf9b..b9c23e0173a 100644 --- a/celery/concurrency/thread.py +++ b/celery/concurrency/thread.py @@ -61,7 +61,9 @@ def on_apply( return ApplyResult(f) def _get_info(self) -> PoolInfo: - return { + info = super()._get_info() + info.update({ 'max-concurrency': self.limit, 'threads': len(self.executor._threads) - } + }) + return info diff --git a/t/unit/app/test_amqp.py b/t/unit/app/test_amqp.py index 1010c4c64ce..070002d43f4 100644 --- a/t/unit/app/test_amqp.py +++ b/t/unit/app/test_amqp.py @@ -206,7 +206,7 @@ def test_as_task_message_without_utc(self): class test_AMQP_Base: - def setup(self): + def setup_method(self): self.simple_message = self.app.amqp.as_task_v2( uuid(), 'foo', create_sent_event=True, ) diff --git a/t/unit/app/test_annotations.py b/t/unit/app/test_annotations.py index e262e23ce84..7b13d37ef6a 100644 --- a/t/unit/app/test_annotations.py +++ b/t/unit/app/test_annotations.py @@ -8,7 +8,7 @@ class MyAnnotation: class AnnotationCase: - def setup(self): + def setup_method(self): @self.app.task(shared=False) def add(x, y): return x + y diff --git a/t/unit/app/test_app.py b/t/unit/app/test_app.py index 844934b71b1..9d504f9fcc4 100644 --- a/t/unit/app/test_app.py 
+++ b/t/unit/app/test_app.py @@ -71,7 +71,7 @@ def test_task_join_will_block(self, patching): class test_App: - def setup(self): + def setup_method(self): self.app.add_defaults(deepcopy(self.CELERY_TEST_CONFIG)) def test_now(self): diff --git a/t/unit/app/test_builtins.py b/t/unit/app/test_builtins.py index dcbec4b201b..94ab14e9c97 100644 --- a/t/unit/app/test_builtins.py +++ b/t/unit/app/test_builtins.py @@ -10,7 +10,7 @@ class BuiltinsCase: - def setup(self): + def setup_method(self): @self.app.task(shared=False) def xsum(x): return sum(x) @@ -34,7 +34,7 @@ def test_run(self): class test_accumulate(BuiltinsCase): - def setup(self): + def setup_method(self): self.accumulate = self.app.tasks['celery.accumulate'] def test_with_index(self): @@ -89,7 +89,7 @@ def chunks_mul(l): class test_group(BuiltinsCase): - def setup(self): + def setup_method(self): self.maybe_signature = self.patching('celery.canvas.maybe_signature') self.maybe_signature.side_effect = pass1 self.app.producer_or_acquire = Mock() @@ -98,7 +98,7 @@ def setup(self): ) self.app.conf.task_always_eager = True self.task = builtins.add_group_task(self.app) - super().setup() + super().setup_method() def test_apply_async_eager(self): self.task.apply = Mock(name='apply') @@ -132,8 +132,8 @@ def test_task__disable_add_to_parent(self, current_worker_task): class test_chain(BuiltinsCase): - def setup(self): - super().setup() + def setup_method(self): + super().setup_method() self.task = builtins.add_chain_task(self.app) def test_not_implemented(self): @@ -143,9 +143,9 @@ def test_not_implemented(self): class test_chord(BuiltinsCase): - def setup(self): + def setup_method(self): self.task = builtins.add_chord_task(self.app) - super().setup() + super().setup_method() def test_apply_async(self): x = chord([self.add.s(i, i) for i in range(10)], body=self.xsum.s()) diff --git a/t/unit/app/test_control.py b/t/unit/app/test_control.py index eb6a761e837..0908491a9ee 100644 --- a/t/unit/app/test_control.py +++ b/t/unit/app/test_control.py @@ -52,7 +52,7 @@ def test_flatten_reply(self): class test_inspect: - def setup(self): + def setup_method(self): self.app.control.broadcast = Mock(name='broadcast') self.app.control.broadcast.return_value = {} self.inspect = self.app.control.inspect() @@ -207,7 +207,7 @@ def test_report(self): class test_Control_broadcast: - def setup(self): + def setup_method(self): self.app.control.mailbox = Mock(name='mailbox') def test_broadcast(self): @@ -231,7 +231,7 @@ def test_broadcast_limit(self): class test_Control: - def setup(self): + def setup_method(self): self.app.control.broadcast = Mock(name='broadcast') self.app.control.broadcast.return_value = {} diff --git a/t/unit/app/test_defaults.py b/t/unit/app/test_defaults.py index 649ca4aab7d..509718d6b86 100644 --- a/t/unit/app/test_defaults.py +++ b/t/unit/app/test_defaults.py @@ -7,10 +7,10 @@ class test_defaults: - def setup(self): + def setup_method(self): self._prev = sys.modules.pop('celery.app.defaults', None) - def teardown(self): + def teardown_method(self): if self._prev: sys.modules['celery.app.defaults'] = self._prev diff --git a/t/unit/app/test_loaders.py b/t/unit/app/test_loaders.py index 09c8a6fe775..879887ebe9e 100644 --- a/t/unit/app/test_loaders.py +++ b/t/unit/app/test_loaders.py @@ -35,7 +35,7 @@ class test_LoaderBase: 'password': 'qwerty', 'timeout': 3} - def setup(self): + def setup_method(self): self.loader = DummyLoader(app=self.app) def test_handlers_pass(self): @@ -212,7 +212,7 @@ def find_module(self, name): class test_AppLoader: - def 
setup(self): + def setup_method(self): self.loader = AppLoader(app=self.app) def test_on_worker_init(self): diff --git a/t/unit/app/test_log.py b/t/unit/app/test_log.py index c3a425447a3..3be3db3a70b 100644 --- a/t/unit/app/test_log.py +++ b/t/unit/app/test_log.py @@ -150,7 +150,7 @@ def setup_logger(self, *args, **kwargs): return logging.root - def setup(self): + def setup_method(self): self.get_logger = lambda n=None: get_logger(n) if n else logging.root signals.setup_logging.receivers[:] = [] self.app.log.already_setup = False @@ -312,7 +312,7 @@ def test_logging_proxy_recurse_protection(self, restore_logging): class test_task_logger(test_default_logger): - def setup(self): + def setup_method(self): logger = self.logger = get_logger('celery.task') logger.handlers = [] logging.root.manager.loggerDict.pop(logger.name, None) @@ -326,7 +326,7 @@ def test_task(): from celery._state import _task_stack _task_stack.push(test_task) - def teardown(self): + def teardown_method(self): from celery._state import _task_stack _task_stack.pop() diff --git a/t/unit/app/test_registry.py b/t/unit/app/test_registry.py index 577c42e8764..8bd8ae5dbcf 100644 --- a/t/unit/app/test_registry.py +++ b/t/unit/app/test_registry.py @@ -23,7 +23,7 @@ def test_unpickle_v2(self, app): class test_TaskRegistry: - def setup(self): + def setup_method(self): self.mytask = self.app.task(name='A', shared=False)(returns) self.missing_name_task = self.app.task( name=None, shared=False)(returns) diff --git a/t/unit/app/test_routes.py b/t/unit/app/test_routes.py index fbb2803b4d1..775bbf7abd9 100644 --- a/t/unit/app/test_routes.py +++ b/t/unit/app/test_routes.py @@ -27,7 +27,7 @@ def set_queues(app, **queues): class RouteCase: - def setup(self): + def setup_method(self): self.a_queue = { 'exchange': 'fooexchange', 'exchange_type': 'fanout', diff --git a/t/unit/app/test_schedules.py b/t/unit/app/test_schedules.py index 71b1dba71fb..ec3baedce85 100644 --- a/t/unit/app/test_schedules.py +++ b/t/unit/app/test_schedules.py @@ -25,7 +25,7 @@ def patch_crontab_nowfun(cls, retval): class test_solar: - def setup(self): + def setup_method(self): pytest.importorskip('ephem') self.s = solar('sunrise', 60, 30, app=self.app) @@ -475,7 +475,7 @@ def test_day_after_dst_start(self): class test_crontab_is_due: - def setup(self): + def setup_method(self): self.now = self.app.now() self.next_minute = 60 - self.now.second - 1e-6 * self.now.microsecond self.every_minute = self.crontab() diff --git a/t/unit/apps/test_multi.py b/t/unit/apps/test_multi.py index a5c4c0e6c3a..2690872292b 100644 --- a/t/unit/apps/test_multi.py +++ b/t/unit/apps/test_multi.py @@ -172,7 +172,7 @@ def test_optmerge(self): class test_Node: - def setup(self): + def setup_method(self): self.p = Mock(name='p') self.p.options = { '--executable': 'python', @@ -308,7 +308,7 @@ def test_pidfile_custom(self, mock_exists, mock_dirs): class test_Cluster: - def setup(self): + def setup_method(self): self.Popen = self.patching('celery.apps.multi.Popen') self.kill = self.patching('os.kill') self.gethostname = self.patching('celery.apps.multi.gethostname') diff --git a/t/unit/backends/test_arangodb.py b/t/unit/backends/test_arangodb.py index 4486f0b52c0..c35fb162c78 100644 --- a/t/unit/backends/test_arangodb.py +++ b/t/unit/backends/test_arangodb.py @@ -19,7 +19,7 @@ class test_ArangoDbBackend: - def setup(self): + def setup_method(self): self.backend = ArangoDbBackend(app=self.app) def test_init_no_arangodb(self): diff --git a/t/unit/backends/test_azureblockblob.py 
b/t/unit/backends/test_azureblockblob.py index 5329140627f..36ca91d82cb 100644 --- a/t/unit/backends/test_azureblockblob.py +++ b/t/unit/backends/test_azureblockblob.py @@ -14,7 +14,7 @@ class test_AzureBlockBlobBackend: - def setup(self): + def setup_method(self): self.url = ( "azureblockblob://" "DefaultEndpointsProtocol=protocol;" @@ -168,7 +168,7 @@ def test_base_path_conf_default(self): class test_as_uri: - def setup(self): + def setup_method(self): self.url = ( "azureblockblob://" "DefaultEndpointsProtocol=protocol;" diff --git a/t/unit/backends/test_base.py b/t/unit/backends/test_base.py index 34205caa729..d520a5d3608 100644 --- a/t/unit/backends/test_base.py +++ b/t/unit/backends/test_base.py @@ -69,7 +69,7 @@ def test_create_exception_cls(self): class test_Backend_interface: - def setup(self): + def setup_method(self): self.app.conf.accept_content = ['json'] def test_accept_precedence(self): @@ -167,7 +167,7 @@ def test_get_result_meta_with_none(self): class test_BaseBackend_interface: - def setup(self): + def setup_method(self): self.b = BaseBackend(self.app) @self.app.task(shared=False) @@ -261,7 +261,7 @@ def test_unpickleable(self): class test_prepare_exception: - def setup(self): + def setup_method(self): self.b = BaseBackend(self.app) def test_unpickleable(self): @@ -359,7 +359,7 @@ def _delete_group(self, group_id): class test_BaseBackend_dict: - def setup(self): + def setup_method(self): self.b = DictBackend(app=self.app) @self.app.task(shared=False, bind=True) @@ -650,7 +650,7 @@ def test_get_children(self): class test_KeyValueStoreBackend: - def setup(self): + def setup_method(self): self.b = KVBackend(app=self.app) def test_on_chord_part_return(self): @@ -1031,7 +1031,7 @@ def test_chain_with_chord_raises_error(self): class test_as_uri: - def setup(self): + def setup_method(self): self.b = BaseBackend( app=self.app, url='sch://uuuu:pwpw@hostname.dom' diff --git a/t/unit/backends/test_cache.py b/t/unit/backends/test_cache.py index 79b5b69ed1c..a82d0bbcfb9 100644 --- a/t/unit/backends/test_cache.py +++ b/t/unit/backends/test_cache.py @@ -20,14 +20,14 @@ def __init__(self, data): class test_CacheBackend: - def setup(self): + def setup_method(self): self.app.conf.result_serializer = 'pickle' self.tb = CacheBackend(backend='memory://', app=self.app) self.tid = uuid() self.old_get_best_memcached = backends['memcache'] backends['memcache'] = lambda: (DummyClient, ensure_bytes) - def teardown(self): + def teardown_method(self): backends['memcache'] = self.old_get_best_memcached def test_no_backend(self): diff --git a/t/unit/backends/test_cassandra.py b/t/unit/backends/test_cassandra.py index 75d8818bcd1..9bf8a480f3d 100644 --- a/t/unit/backends/test_cassandra.py +++ b/t/unit/backends/test_cassandra.py @@ -18,7 +18,7 @@ class test_CassandraBackend: - def setup(self): + def setup_method(self): self.app.conf.update( cassandra_servers=['example.com'], cassandra_keyspace='celery', diff --git a/t/unit/backends/test_consul.py b/t/unit/backends/test_consul.py index 61fb5d41afd..cec77360490 100644 --- a/t/unit/backends/test_consul.py +++ b/t/unit/backends/test_consul.py @@ -9,7 +9,7 @@ class test_ConsulBackend: - def setup(self): + def setup_method(self): self.backend = ConsulBackend( app=self.app, url='consul://localhost:800') diff --git a/t/unit/backends/test_cosmosdbsql.py b/t/unit/backends/test_cosmosdbsql.py index 3ee85df43dc..bfd0d0d1e1f 100644 --- a/t/unit/backends/test_cosmosdbsql.py +++ b/t/unit/backends/test_cosmosdbsql.py @@ -13,7 +13,7 @@ class test_DocumentDBBackend: - def 
setup(self): + def setup_method(self): self.url = "cosmosdbsql://:key@endpoint" self.backend = CosmosDBSQLBackend(app=self.app, url=self.url) diff --git a/t/unit/backends/test_couchbase.py b/t/unit/backends/test_couchbase.py index 297735a38ba..b720b2525c5 100644 --- a/t/unit/backends/test_couchbase.py +++ b/t/unit/backends/test_couchbase.py @@ -22,7 +22,7 @@ class test_CouchbaseBackend: - def setup(self): + def setup_method(self): self.backend = CouchbaseBackend(app=self.app) def test_init_no_couchbase(self): diff --git a/t/unit/backends/test_couchdb.py b/t/unit/backends/test_couchdb.py index 41505594f72..07497b18cec 100644 --- a/t/unit/backends/test_couchdb.py +++ b/t/unit/backends/test_couchdb.py @@ -20,7 +20,7 @@ class test_CouchBackend: - def setup(self): + def setup_method(self): self.Server = self.patching('pycouchdb.Server') self.backend = CouchBackend(app=self.app) diff --git a/t/unit/backends/test_database.py b/t/unit/backends/test_database.py index c32440b2fe4..511298f9a1b 100644 --- a/t/unit/backends/test_database.py +++ b/t/unit/backends/test_database.py @@ -45,7 +45,7 @@ def test_context_raises(self): @skip.if_pypy class test_DatabaseBackend: - def setup(self): + def setup_method(self): self.uri = 'sqlite:///test.db' self.app.conf.result_serializer = 'pickle' @@ -219,7 +219,7 @@ def test_TaskSet__repr__(self): @skip.if_pypy class test_DatabaseBackend_result_extended(): - def setup(self): + def setup_method(self): self.uri = 'sqlite:///test.db' self.app.conf.result_serializer = 'pickle' self.app.conf.result_extended = True diff --git a/t/unit/backends/test_dynamodb.py b/t/unit/backends/test_dynamodb.py index a27af96d6ff..0afb425e1d1 100644 --- a/t/unit/backends/test_dynamodb.py +++ b/t/unit/backends/test_dynamodb.py @@ -12,7 +12,7 @@ class test_DynamoDBBackend: - def setup(self): + def setup_method(self): self._static_timestamp = Decimal(1483425566.52) self.app.conf.result_backend = 'dynamodb://' diff --git a/t/unit/backends/test_elasticsearch.py b/t/unit/backends/test_elasticsearch.py index c39419eb52b..45f8a6fb092 100644 --- a/t/unit/backends/test_elasticsearch.py +++ b/t/unit/backends/test_elasticsearch.py @@ -31,7 +31,7 @@ class test_ElasticsearchBackend: - def setup(self): + def setup_method(self): self.backend = ElasticsearchBackend(app=self.app) def test_init_no_elasticsearch(self): diff --git a/t/unit/backends/test_filesystem.py b/t/unit/backends/test_filesystem.py index 4fb46683f4f..7f66a6aeae3 100644 --- a/t/unit/backends/test_filesystem.py +++ b/t/unit/backends/test_filesystem.py @@ -17,7 +17,7 @@ @t.skip.if_win32 class test_FilesystemBackend: - def setup(self): + def setup_method(self): self.directory = tempfile.mkdtemp() self.url = 'file://' + self.directory self.path = self.directory.encode('ascii') diff --git a/t/unit/backends/test_mongodb.py b/t/unit/backends/test_mongodb.py index c15ded834f1..a0bb8169ea3 100644 --- a/t/unit/backends/test_mongodb.py +++ b/t/unit/backends/test_mongodb.py @@ -77,7 +77,7 @@ class test_MongoBackend: 'hostname.dom/database?replicaSet=rs' ) - def setup(self): + def setup_method(self): self.patching('celery.backends.mongodb.MongoBackend.encode') self.patching('celery.backends.mongodb.MongoBackend.decode') self.patching('celery.backends.mongodb.Binary') diff --git a/t/unit/backends/test_redis.py b/t/unit/backends/test_redis.py index 1643c165956..dbb11db8e3e 100644 --- a/t/unit/backends/test_redis.py +++ b/t/unit/backends/test_redis.py @@ -358,7 +358,7 @@ def chord_context(self, size=1): callback.delay = Mock(name='callback.delay') yield 
tasks, request, callback - def setup(self): + def setup_method(self): self.Backend = self.get_backend() self.E_LOST = self.get_E_LOST() self.b = self.Backend(app=self.app) @@ -1193,7 +1193,7 @@ def get_E_LOST(self): from celery.backends.redis import E_LOST return E_LOST - def setup(self): + def setup_method(self): self.Backend = self.get_backend() self.E_LOST = self.get_E_LOST() self.b = self.Backend(app=self.app) diff --git a/t/unit/backends/test_rpc.py b/t/unit/backends/test_rpc.py index 71e573da8ff..5d37689a31d 100644 --- a/t/unit/backends/test_rpc.py +++ b/t/unit/backends/test_rpc.py @@ -23,7 +23,7 @@ def test_drain_events_before_start(self): class test_RPCBackend: - def setup(self): + def setup_method(self): self.b = RPCBackend(app=self.app) def test_oid(self): diff --git a/t/unit/concurrency/test_concurrency.py b/t/unit/concurrency/test_concurrency.py index 1a3267bfabf..ba80aa98ec5 100644 --- a/t/unit/concurrency/test_concurrency.py +++ b/t/unit/concurrency/test_concurrency.py @@ -109,6 +109,7 @@ def test_interface_on_apply(self): def test_interface_info(self): assert BasePool(10).info == { + 'implementation': 'celery.concurrency.base:BasePool', 'max-concurrency': 10, } @@ -166,6 +167,7 @@ def test_no_concurrent_futures__returns_no_threads_pool_name(self): 'gevent', 'solo', 'processes', + 'custom', ) with patch.dict(sys.modules, {'concurrent.futures': None}): importlib.reload(concurrency) @@ -179,6 +181,7 @@ def test_concurrent_futures__returns_threads_pool_name(self): 'solo', 'processes', 'threads', + 'custom', ) with patch.dict(sys.modules, {'concurrent.futures': Mock()}): importlib.reload(concurrency) diff --git a/t/unit/concurrency/test_eventlet.py b/t/unit/concurrency/test_eventlet.py index b6a46d95ceb..a044d4ae67a 100644 --- a/t/unit/concurrency/test_eventlet.py +++ b/t/unit/concurrency/test_eventlet.py @@ -22,10 +22,10 @@ @t.skip.if_pypy class EventletCase: - def setup(self): + def setup_method(self): self.patching.modules(*eventlet_modules) - def teardown(self): + def teardown_method(self): for mod in [mod for mod in sys.modules if mod.startswith('eventlet')]: try: @@ -129,6 +129,7 @@ def test_get_info(self): x = TaskPool(10) x._pool = Mock(name='_pool') assert x._get_info() == { + 'implementation': 'celery.concurrency.eventlet:TaskPool', 'max-concurrency': 10, 'free-threads': x._pool.free(), 'running-threads': x._pool.running(), diff --git a/t/unit/concurrency/test_gevent.py b/t/unit/concurrency/test_gevent.py index 89a8398ec3b..c0b24001d90 100644 --- a/t/unit/concurrency/test_gevent.py +++ b/t/unit/concurrency/test_gevent.py @@ -26,7 +26,7 @@ def test_is_patched(self): class test_Timer: - def setup(self): + def setup_method(self): self.patching.modules(*gevent_modules) self.greenlet = self.patching('gevent.greenlet') self.GreenletExit = self.patching('gevent.greenlet.GreenletExit') @@ -57,7 +57,7 @@ def test_sched(self): class test_TaskPool: - def setup(self): + def setup_method(self): self.patching.modules(*gevent_modules) self.spawn_raw = self.patching('gevent.spawn_raw') self.Pool = self.patching('gevent.pool.Pool') diff --git a/t/unit/concurrency/test_pool.py b/t/unit/concurrency/test_pool.py index 5661f13760f..1e2d70afa83 100644 --- a/t/unit/concurrency/test_pool.py +++ b/t/unit/concurrency/test_pool.py @@ -24,7 +24,7 @@ def raise_something(i): class test_TaskPool: - def setup(self): + def setup_method(self): from celery.concurrency.prefork import TaskPool self.TaskPool = TaskPool diff --git a/t/unit/concurrency/test_prefork.py b/t/unit/concurrency/test_prefork.py 
index 194dec78aea..49b80c17f0c 100644 --- a/t/unit/concurrency/test_prefork.py +++ b/t/unit/concurrency/test_prefork.py @@ -194,7 +194,7 @@ class ExeMockTaskPool(mp.TaskPool): @t.skip.if_win32 class test_AsynPool: - def setup(self): + def setup_method(self): pytest.importorskip('multiprocessing') def test_gen_not_started(self): @@ -369,7 +369,7 @@ def test_register_with_event_loop__no_on_tick_dupes(self): @t.skip.if_win32 class test_ResultHandler: - def setup(self): + def setup_method(self): pytest.importorskip('multiprocessing') def test_process_result(self): diff --git a/t/unit/contrib/test_abortable.py b/t/unit/contrib/test_abortable.py index 9edc8435ae4..3c3d55344ff 100644 --- a/t/unit/contrib/test_abortable.py +++ b/t/unit/contrib/test_abortable.py @@ -3,7 +3,7 @@ class test_AbortableTask: - def setup(self): + def setup_method(self): @self.app.task(base=AbortableTask, shared=False) def abortable(): return True diff --git a/t/unit/contrib/test_worker.py b/t/unit/contrib/test_worker.py index f2ccf0625bd..178a974998e 100644 --- a/t/unit/contrib/test_worker.py +++ b/t/unit/contrib/test_worker.py @@ -8,7 +8,7 @@ class test_worker: - def setup(self): + def setup_method(self): self.app = Celery('celerytest', backend='cache+memory://', broker='memory://',) @self.app.task diff --git a/t/unit/events/test_cursesmon.py b/t/unit/events/test_cursesmon.py index 17cce119fed..fa0816050de 100644 --- a/t/unit/events/test_cursesmon.py +++ b/t/unit/events/test_cursesmon.py @@ -11,7 +11,7 @@ def getmaxyx(self): class test_CursesDisplay: - def setup(self): + def setup_method(self): from celery.events import cursesmon self.monitor = cursesmon.CursesMonitor(object(), app=self.app) self.win = MockWindow() diff --git a/t/unit/events/test_snapshot.py b/t/unit/events/test_snapshot.py index 3dfb01846e9..c09d67d10e5 100644 --- a/t/unit/events/test_snapshot.py +++ b/t/unit/events/test_snapshot.py @@ -19,7 +19,7 @@ def call_repeatedly(self, secs, fun, *args, **kwargs): class test_Polaroid: - def setup(self): + def setup_method(self): self.state = self.app.events.State() def test_constructor(self): @@ -101,7 +101,7 @@ class MockEvents(Events): def Receiver(self, *args, **kwargs): return test_evcam.MockReceiver() - def setup(self): + def setup_method(self): self.app.events = self.MockEvents() self.app.events.app = self.app diff --git a/t/unit/security/case.py b/t/unit/security/case.py index 36f0e5e4c95..319853dbfda 100644 --- a/t/unit/security/case.py +++ b/t/unit/security/case.py @@ -3,5 +3,5 @@ class SecurityCase: - def setup(self): + def setup_method(self): pytest.importorskip('cryptography') diff --git a/t/unit/security/test_security.py b/t/unit/security/test_security.py index 0559919997e..fc9a5e69004 100644 --- a/t/unit/security/test_security.py +++ b/t/unit/security/test_security.py @@ -33,7 +33,7 @@ class test_security(SecurityCase): - def teardown(self): + def teardown_method(self): registry._disabled_content_types.clear() registry._set_default_serializer('json') try: diff --git a/t/unit/tasks/test_canvas.py b/t/unit/tasks/test_canvas.py index 493ce04d50a..cf294d6e624 100644 --- a/t/unit/tasks/test_canvas.py +++ b/t/unit/tasks/test_canvas.py @@ -44,7 +44,7 @@ def test_when_no_len_and_no_length_hint(self): class CanvasCase: - def setup(self): + def setup_method(self): @self.app.task(shared=False) def add(x, y): return x + y diff --git a/t/unit/tasks/test_chord.py b/t/unit/tasks/test_chord.py index c2aad5f894f..0c3ddf19b0b 100644 --- a/t/unit/tasks/test_chord.py +++ b/t/unit/tasks/test_chord.py @@ -20,7 +20,7 @@ 
def __eq__(self, other): class ChordCase: - def setup(self): + def setup_method(self): @self.app.task(shared=False) def add(x, y): @@ -323,7 +323,7 @@ def sumX(n): class test_add_to_chord: - def setup(self): + def setup_method(self): @self.app.task(shared=False) def add(x, y): diff --git a/t/unit/tasks/test_result.py b/t/unit/tasks/test_result.py index 6b288e9c557..818409c97d9 100644 --- a/t/unit/tasks/test_result.py +++ b/t/unit/tasks/test_result.py @@ -63,7 +63,7 @@ def remove_pending_result(self, *args, **kwargs): class test_AsyncResult: - def setup(self): + def setup_method(self): self.app.conf.result_cache_max = 100 self.app.conf.result_serializer = 'pickle' self.app.conf.result_extended = True @@ -628,7 +628,7 @@ def get_many(self, *args, **kwargs): class test_GroupResult: - def setup(self): + def setup_method(self): self.size = 10 self.ts = self.app.GroupResult( uuid(), make_mock_group(self.app, self.size), @@ -882,7 +882,7 @@ def test_result(self, app): class test_failed_AsyncResult: - def setup(self): + def setup_method(self): self.size = 11 self.app.conf.result_serializer = 'pickle' results = make_mock_group(self.app, 10) @@ -907,7 +907,7 @@ def test_failed(self): class test_pending_Group: - def setup(self): + def setup_method(self): self.ts = self.app.GroupResult( uuid(), [self.app.AsyncResult(uuid()), self.app.AsyncResult(uuid())]) @@ -932,7 +932,7 @@ def test_join_longer(self): class test_EagerResult: - def setup(self): + def setup_method(self): @self.app.task(shared=False) def raising(x, y): raise KeyError(x, y) diff --git a/t/unit/tasks/test_tasks.py b/t/unit/tasks/test_tasks.py index 2a5f08d6c4f..a636eac73be 100644 --- a/t/unit/tasks/test_tasks.py +++ b/t/unit/tasks/test_tasks.py @@ -60,7 +60,7 @@ class TaskWithRetryButForTypeError(Task): class TasksCase: - def setup(self): + def setup_method(self): self.mytask = self.app.task(shared=False)(return_True) @self.app.task(bind=True, count=0, shared=False) diff --git a/t/unit/tasks/test_trace.py b/t/unit/tasks/test_trace.py index 60fa253dda3..e7767a979f5 100644 --- a/t/unit/tasks/test_trace.py +++ b/t/unit/tasks/test_trace.py @@ -28,7 +28,7 @@ def trace( class TraceCase: - def setup(self): + def setup_method(self): @self.app.task(shared=False) def add(x, y): return x + y diff --git a/t/unit/utils/test_collections.py b/t/unit/utils/test_collections.py index aae685ebc7c..79ccc011741 100644 --- a/t/unit/utils/test_collections.py +++ b/t/unit/utils/test_collections.py @@ -52,7 +52,7 @@ def test_items(self): class test_ConfigurationView: - def setup(self): + def setup_method(self): self.view = ConfigurationView( {'changed_key': 1, 'both': 2}, [ diff --git a/t/unit/worker/test_autoscale.py b/t/unit/worker/test_autoscale.py index f6c63c57ac3..c4a2a75ed73 100644 --- a/t/unit/worker/test_autoscale.py +++ b/t/unit/worker/test_autoscale.py @@ -73,7 +73,7 @@ def test_info_without_event_loop(self): class test_Autoscaler: - def setup(self): + def setup_method(self): self.pool = MockPool(3) def test_stop(self): diff --git a/t/unit/worker/test_bootsteps.py b/t/unit/worker/test_bootsteps.py index cb1e91f77be..4a33f44da35 100644 --- a/t/unit/worker/test_bootsteps.py +++ b/t/unit/worker/test_bootsteps.py @@ -56,7 +56,7 @@ class test_Step: class Def(bootsteps.StartStopStep): name = 'test_Step.Def' - def setup(self): + def setup_method(self): self.steps = [] def test_blueprint_name(self, bp='test_blueprint_name'): @@ -162,7 +162,7 @@ class test_StartStopStep: class Def(bootsteps.StartStopStep): name = 'test_StartStopStep.Def' - def setup(self): + 
def setup_method(self): self.steps = [] def test_start__stop(self): diff --git a/t/unit/worker/test_components.py b/t/unit/worker/test_components.py index 14869cf6df7..739808e4311 100644 --- a/t/unit/worker/test_components.py +++ b/t/unit/worker/test_components.py @@ -22,7 +22,7 @@ def test_create__eventloop(self): class test_Hub: - def setup(self): + def setup_method(self): self.w = Mock(name='w') self.hub = Hub(self.w) self.w.hub = Mock(name='w.hub') diff --git a/t/unit/worker/test_consumer.py b/t/unit/worker/test_consumer.py index 7865cc3ac77..f0acc0e8b99 100644 --- a/t/unit/worker/test_consumer.py +++ b/t/unit/worker/test_consumer.py @@ -41,7 +41,7 @@ def get_consumer(self, no_hub=False, **kwargs): class test_Consumer(ConsumerTestCase): - def setup(self): + def setup_method(self): @self.app.task(shared=False) def add(x, y): return x + y diff --git a/t/unit/worker/test_control.py b/t/unit/worker/test_control.py index 33cc521cb5c..a1761a1cb01 100644 --- a/t/unit/worker/test_control.py +++ b/t/unit/worker/test_control.py @@ -116,7 +116,7 @@ def se(*args, **kwargs): class test_ControlPanel: - def setup(self): + def setup_method(self): self.panel = self.create_panel(consumer=Consumer(self.app)) @self.app.task(name='c.unittest.mytask', rate_limit=200, shared=False) diff --git a/t/unit/worker/test_loops.py b/t/unit/worker/test_loops.py index 8a1fe63e4a0..68e84562b4c 100644 --- a/t/unit/worker/test_loops.py +++ b/t/unit/worker/test_loops.py @@ -133,7 +133,7 @@ def get_task_callback(*args, **kwargs): class test_asynloop: - def setup(self): + def setup_method(self): @self.app.task(shared=False) def add(x, y): return x + y @@ -529,7 +529,7 @@ def drain_events(timeout): class test_quick_drain: - def setup(self): + def setup_method(self): self.connection = Mock(name='connection') def test_drain(self): diff --git a/t/unit/worker/test_request.py b/t/unit/worker/test_request.py index b818f2837cc..ef312f44a51 100644 --- a/t/unit/worker/test_request.py +++ b/t/unit/worker/test_request.py @@ -26,7 +26,7 @@ class RequestCase: - def setup(self): + def setup_method(self): self.app.conf.result_serializer = 'pickle' @self.app.task(shared=False) @@ -1173,11 +1173,11 @@ def test_group_index(self): class test_create_request_class(RequestCase): - def setup(self): + def setup_method(self): self.task = Mock(name='task') self.pool = Mock(name='pool') self.eventer = Mock(name='eventer') - super().setup() + super().setup_method() def create_request_cls(self, **kwargs): return create_request_cls( diff --git a/t/unit/worker/test_state.py b/t/unit/worker/test_state.py index bdff94facbf..cf67aa25957 100644 --- a/t/unit/worker/test_state.py +++ b/t/unit/worker/test_state.py @@ -45,7 +45,7 @@ class MyPersistent(state.Persistent): class test_maybe_shutdown: - def teardown(self): + def teardown_method(self): state.should_stop = None state.should_terminate = None diff --git a/t/unit/worker/test_strategy.py b/t/unit/worker/test_strategy.py index 8d7098954af..366d5c62081 100644 --- a/t/unit/worker/test_strategy.py +++ b/t/unit/worker/test_strategy.py @@ -18,7 +18,7 @@ class test_proto1_to_proto2: - def setup(self): + def setup_method(self): self.message = Mock(name='message') self.body = { 'args': (1,), @@ -58,7 +58,7 @@ def test_message(self): class test_default_strategy_proto2: - def setup(self): + def setup_method(self): @self.app.task(shared=False) def add(x, y): return x + y @@ -301,7 +301,7 @@ def failed(): class test_hybrid_to_proto2: - def setup(self): + def setup_method(self): self.message = Mock(name='message', 
headers={"custom": "header"}) self.body = { 'args': (1,), diff --git a/t/unit/worker/test_worker.py b/t/unit/worker/test_worker.py index 6bf2a14a1d6..cfa67440b4c 100644 --- a/t/unit/worker/test_worker.py +++ b/t/unit/worker/test_worker.py @@ -77,7 +77,7 @@ def create_task_message(self, channel, *args, **kwargs): class test_Consumer(ConsumerCase): - def setup(self): + def setup_method(self): self.buffer = FastQueue() self.timer = Timer() @@ -86,7 +86,7 @@ def foo_task(x, y, z): return x * y * z self.foo_task = foo_task - def teardown(self): + def teardown_method(self): self.timer.stop() def LoopConsumer(self, buffer=None, controller=None, timer=None, app=None, @@ -697,7 +697,7 @@ def test_reset_connection_with_no_node(self): class test_WorkController(ConsumerCase): - def setup(self): + def setup_method(self): self.worker = self.create_worker() self._logger = worker_module.logger self._comp_logger = components.logger @@ -709,7 +709,7 @@ def foo_task(x, y, z): return x * y * z self.foo_task = foo_task - def teardown(self): + def teardown_method(self): worker_module.logger = self._logger components.logger = self._comp_logger From 706ebb64c8c5a9c93796ac5f63ca13ee3dce3dae Mon Sep 17 00:00:00 2001 From: ShaheedHaque Date: Tue, 15 Nov 2022 06:38:00 +0000 Subject: [PATCH 0249/1051] Support for out-of-tree worker pool implementations (#7880) * Ensure all implementations of BasePool._get_info() use the super() results as a base. * Have BasePool._get_info() report the implementation class of the pool using the standard Celery class naming convention. * Allow for an out-of-tree worker pool implementation. This is used as follows: - Set the environment variable CELERY_CUSTOM_WORKER_POOL to the name of an implementation of :class:`celery.concurrency.base.BasePool` in the standard Celery format of "package:class". - Select this pool using '--pool custom'. * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * Fixes for missed test breakage. Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> From 577eee60d51bdd75d3658699effdf6f78a3e604d Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 16 Nov 2022 12:51:25 +0200 Subject: [PATCH 0250/1051] Canvas.py doc enhancement (#7907) * Enhanced doc for canvas._chord.set_immutable() * Enhanced doc for canvas._chord.link() * Enhanced doc for canvas._chord.link_error() * Enhanced doc for canvas._chord.__length_hint__() * Enhanced doc for canvas._chord._descend() * Enhanced doc for canvas._chord.from_dict() * Enhanced doc for canvas._chord.run() --- celery/canvas.py | 98 +++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 97 insertions(+), 1 deletion(-) diff --git a/celery/canvas.py b/celery/canvas.py index ce26dcc1cb6..04f591116d8 100644 --- a/celery/canvas.py +++ b/celery/canvas.py @@ -1920,6 +1920,60 @@ class _chord(Signature): @classmethod def from_dict(cls, d, app=None): + """Create a chord signature from a dictionary that represents a chord. 
From 577eee60d51bdd75d3658699effdf6f78a3e604d Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 16 Nov 2022 12:51:25 +0200 Subject: [PATCH 0250/1051] Canvas.py doc enhancement (#7907) * Enhanced doc for canvas._chord.set_immutable() * Enhanced doc for canvas._chord.link() * Enhanced doc for canvas._chord.link_error() * Enhanced doc for canvas._chord.__length_hint__() * Enhanced doc for canvas._chord._descend() * Enhanced doc for canvas._chord.from_dict() * Enhanced doc for canvas._chord.run() --- celery/canvas.py | 98 +++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 97 insertions(+), 1 deletion(-) diff --git a/celery/canvas.py b/celery/canvas.py index ce26dcc1cb6..04f591116d8 100644 --- a/celery/canvas.py +++ b/celery/canvas.py @@ -1920,6 +1920,60 @@ class _chord(Signature): @classmethod def from_dict(cls, d, app=None): + """Create a chord signature from a dictionary that represents a chord. + + Example: + >>> chord_dict = { + "task": "celery.chord", + "args": [], + "kwargs": { + "kwargs": {}, + "header": [ + { + "task": "add", + "args": [ + 1, + 2 + ], + "kwargs": {}, + "options": {}, + "subtask_type": None, + "immutable": False + }, + { + "task": "add", + "args": [ + 3, + 4 + ], + "kwargs": {}, + "options": {}, + "subtask_type": None, + "immutable": False + } + ], + "body": { + "task": "xsum", + "args": [], + "kwargs": {}, + "options": {}, + "subtask_type": None, + "immutable": False + } + }, + "options": {}, + "subtask_type": "chord", + "immutable": False + } + >>> chord_sig = chord.from_dict(chord_dict) + + Iterates over the given tasks in the dictionary and converts them to signatures. + The chord header needs to be defined in d['kwargs']['header'] as a sequence + of tasks. + The chord body needs to be defined in d['kwargs']['body'] as a single task. + + The tasks themselves can be dictionaries or signatures (or both). + """ options = d.copy() args, options['kwargs'] = cls._unpack_args(**options['kwargs']) return cls(*args, app=app, **options) @@ -2057,6 +2111,10 @@ def apply(self, args=None, kwargs=None, @classmethod def _descend(cls, sig_obj): + """Count the number of tasks in the given signature recursively. + + Descend into the signature object and return the number of tasks it contains. + """ # Sometimes serialized signatures might make their way here if not isinstance(sig_obj, Signature) and isinstance(sig_obj, dict): sig_obj = Signature.from_dict(sig_obj) @@ -2083,12 +2141,34 @@ def _descend(cls, sig_obj): return len(sig_obj) def __length_hint__(self): + """Return the number of tasks in this chord's header (recursively).""" tasks = getattr(self.tasks, "tasks", self.tasks) return sum(self._descend(task) for task in tasks) def run(self, header, body, partial_args, app=None, interval=None, countdown=1, max_retries=None, eager=False, task_id=None, kwargs=None, **options): + """Execute the chord. + + Executing the chord means executing the header and sending the + result to the body. In the case of an empty header, the body is + executed immediately. + + Arguments: + header (group): The header to execute. + body (Signature): The body to execute. + partial_args (tuple): Arguments to pass to the header. + app (Celery): The Celery app instance. + interval (float): The interval between retries. + countdown (int): The countdown between retries. + max_retries (int): The maximum number of retries. + task_id (str): The task id to use for the body. + kwargs (dict): Keyword arguments to pass to the header. + options (dict): Options to pass to the header. + + Returns: + AsyncResult: The result of the body (with the result of the header set as the parent of the body's result). + """ app = app or self._get_app(body) group_id = header.options.get('task_id') or uuid() root_id = body.options.get('root_id') @@ -2140,10 +2220,19 @@ def clone(self, *args, **kwargs): return signature def link(self, callback): + """Links a callback to the chord body only.""" self.body.link(callback) return callback def link_error(self, errback): + """Links an error callback to the chord body, and potentially the header as well. + + Note: + The ``task_allow_error_cb_on_chord_header`` setting controls whether + error callbacks are allowed on the header. If this setting is + ``False`` (the current default), then the error callback will only be + applied to the body. + """ if self.app.conf.task_allow_error_cb_on_chord_header: # self.tasks can be a list of the chord header workflow.
if isinstance(self.tasks, (list, tuple)): @@ -2165,7 +2254,14 @@ def link_error(self, errback): return errback def set_immutable(self, immutable): - # changes mutability of header only, not callback. + """Sets the immutable flag on the chord header only. + + Note: + Does not affect the chord body. + + Arguments: + immutable (bool): The new mutability value for chord header. + """ for task in self.tasks: task.set_immutable(immutable) From 145aae8f5299ed3004b0c56c12f7295dda37ef8f Mon Sep 17 00:00:00 2001 From: William Edwards Date: Wed, 16 Nov 2022 16:37:29 +0100 Subject: [PATCH 0251/1051] Use bound task in base task example. Closes #7909 --- docs/userguide/tasks.rst | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/docs/userguide/tasks.rst b/docs/userguide/tasks.rst index 16a73ec6e79..6f9ceed528f 100644 --- a/docs/userguide/tasks.rst +++ b/docs/userguide/tasks.rst @@ -1432,9 +1432,11 @@ The above can be added to each task like this: .. code-block:: python - @app.task(base=DatabaseTask) - def process_rows(): - for row in process_rows.db.table.all(): + from celery.app import task + + @app.task(base=DatabaseTask, bind=True) + def process_rows(self: task): + for row in self.db.table.all(): process_row(row) The ``db`` attribute of the ``process_rows`` task will then From 139293644a59c4559b6b290719d41443c2c44cd7 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sun, 20 Nov 2022 17:45:46 +0200 Subject: [PATCH 0252/1051] Allow the stamping visitor itself to set the stamp value type instead of casting it to a list even when the value is a single item --- celery/app/amqp.py | 2 +- t/integration/test_canvas.py | 44 ++++++++++++++++++++++++++++++++++++ 2 files changed, 45 insertions(+), 1 deletion(-) diff --git a/celery/app/amqp.py b/celery/app/amqp.py index e3245811035..9e52af4a66f 100644 --- a/celery/app/amqp.py +++ b/celery/app/amqp.py @@ -321,7 +321,7 @@ def as_task_v2(self, task_id, name, args=None, kwargs=None, if not root_id: # empty root_id defaults to task_id root_id = task_id - stamps = {header: maybe_list(options[header]) for header in stamped_headers or []} + stamps = {header: options[header] for header in stamped_headers or []} headers = { 'lang': 'py', 'task': name, diff --git a/t/integration/test_canvas.py b/t/integration/test_canvas.py index 8e805db49b7..de2a200f461 100644 --- a/t/integration/test_canvas.py +++ b/t/integration/test_canvas.py @@ -10,6 +10,7 @@ from celery import chain, chord, group, signature from celery.backends.base import BaseKeyValueStoreBackend +from celery.canvas import StampingVisitor from celery.exceptions import ImproperlyConfigured, TimeoutError from celery.result import AsyncResult, GroupResult, ResultSet from celery.signals import before_task_publish @@ -2953,3 +2954,46 @@ def test_rebuild_nested_chord_chord(self, manager): tasks.rebuild_signature.s() ) sig.delay().get(timeout=TIMEOUT) + + +class test_stamping_visitor: + def test_stamp_value_type_defined_by_visitor(self, manager, subtests): + """ Test that the visitor can define the type of the stamped value """ + + @before_task_publish.connect + def before_task_publish_handler(sender=None, body=None, exchange=None, routing_key=None, headers=None, + properties=None, declare=None, retry_policy=None, **kwargs): + nonlocal task_headers + task_headers = headers.copy() + + with subtests.test(msg='Test stamping a single value'): + class CustomStampingVisitor(StampingVisitor): + def on_signature(self, sig, **headers) -> dict: + return {'stamp': 42} + + stamped_task = add.si(1, 1) + 
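+            # stamp() merges the visitor's headers into the signature's options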
stamped_task.stamp(visitor=CustomStampingVisitor()) + result = stamped_task.freeze() + task_headers = None + stamped_task.apply_async() + assert task_headers is not None + assert result.get() == 2 + assert 'stamps' in task_headers + assert 'stamp' in task_headers['stamps'] + assert not isinstance(task_headers['stamps']['stamp'], list) + + with subtests.test(msg='Test stamping a list of values'): + class CustomStampingVisitor(StampingVisitor): + def on_signature(self, sig, **headers) -> dict: + return {'stamp': [4, 2]} + + stamped_task = add.si(1, 1) + stamped_task.stamp(visitor=CustomStampingVisitor()) + result = stamped_task.freeze() + task_headers = None + stamped_task.apply_async() + assert task_headers is not None + assert result.get() == 2 + assert 'stamps' in task_headers + assert 'stamp' in task_headers['stamps'] + assert isinstance(task_headers['stamps']['stamp'], list) From 570c4a6172e3413e6089e739887175ff92b73d61 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 22 Nov 2022 09:31:24 +0200 Subject: [PATCH 0253/1051] Stamping a task left the task properties dirty (#7916) * Added test_properties_not_affected_from_stamping * Removed stamped headers from task options before sending to broker * Fixed linter issues --- celery/app/base.py | 4 ++++ t/integration/test_canvas.py | 31 +++++++++++++++++++++++++++++++ 2 files changed, 35 insertions(+) diff --git a/celery/app/base.py b/celery/app/base.py index 6ca3eaf5ada..d400cd1c000 100644 --- a/celery/app/base.py +++ b/celery/app/base.py @@ -778,6 +778,10 @@ def send_task(self, name, args=None, kwargs=None, countdown=None, **options ) + stamped_headers = options.pop('stamped_headers', []) + for stamp in stamped_headers: + options.pop(stamp) + if connection: producer = amqp.Producer(connection, auto_declare=False) diff --git a/t/integration/test_canvas.py b/t/integration/test_canvas.py index de2a200f461..d3c3dc4c5f7 100644 --- a/t/integration/test_canvas.py +++ b/t/integration/test_canvas.py @@ -2997,3 +2997,34 @@ def on_signature(self, sig, **headers) -> dict: assert 'stamps' in task_headers assert 'stamp' in task_headers['stamps'] assert isinstance(task_headers['stamps']['stamp'], list) + + def test_properties_not_affected_from_stamping(self, manager, subtests): + """ Test that the task properties are not dirty with stamping visitor entries """ + + @before_task_publish.connect + def before_task_publish_handler(sender=None, body=None, exchange=None, routing_key=None, headers=None, + properties=None, declare=None, retry_policy=None, **kwargs): + nonlocal task_headers + nonlocal task_properties + task_headers = headers.copy() + task_properties = properties.copy() + + class CustomStampingVisitor(StampingVisitor): + def on_signature(self, sig, **headers) -> dict: + return {'stamp': 42} + + stamped_task = add.si(1, 1) + stamped_task.stamp(visitor=CustomStampingVisitor()) + result = stamped_task.freeze() + task_headers = None + task_properties = None + stamped_task.apply_async() + assert task_properties is not None + assert result.get() == 2 + assert 'stamped_headers' in task_headers + stamped_headers = task_headers['stamped_headers'] + + with subtests.test(msg='Test that the task properties are not dirty with stamping visitor entries'): + assert 'stamped_headers' not in task_properties, 'stamped_headers key should not be in task properties' + for stamp in stamped_headers: + assert stamp not in task_properties, f'The stamp "{stamp}" should not be in the task properties' From bfd8587ddbf44b945c67441ceb70458a4385154e Mon Sep 17 00:00:00 2001 
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 21 Nov 2022 17:05:34 +0000 Subject: [PATCH 0254/1051] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/mirrors-mypy: v0.990 → v0.991](https://github.com/pre-commit/mirrors-mypy/compare/v0.990...v0.991) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 81428931931..279949078f8 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -29,7 +29,7 @@ repos: - id: isort - repo: https://github.com/pre-commit/mirrors-mypy - rev: v0.990 + rev: v0.991 hooks: - id: mypy pass_filenames: false From 87613c780ccd92c8b2694becfb50511a6052e8f1 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sun, 27 Nov 2022 14:13:48 +0200 Subject: [PATCH 0255/1051] Fixed bug when chaining a chord with a group (#7919) * Reproduced Bug from Issue #5958 * Fixed Issue #5958 * Added unit test: test_chord__or__group_of_single_task() * Added unit test: test_chord_upgrade_on_chaining() * Added unit test: test_chain_of_chord__or__group_of_single_task() * Added unit test: test_chain_of_chord_upgrade_on_chaining() --- celery/canvas.py | 11 +++ t/integration/test_canvas.py | 148 +++++++++++++++++++++++++++++++++++ t/unit/tasks/test_canvas.py | 32 ++++++++ 3 files changed, 191 insertions(+) diff --git a/celery/canvas.py b/celery/canvas.py index 04f591116d8..837364145a0 100644 --- a/celery/canvas.py +++ b/celery/canvas.py @@ -916,6 +916,10 @@ def __or__(self, other): if not tasks: # If the chain is empty, return the group return other + if isinstance(tasks[-1], chord): + # CHAIN [last item is chord] | GROUP -> chain with chord body. + tasks[-1].body = tasks[-1].body | other + return type(self)(tasks, app=self.app) # use type(self) for _chain subclasses return type(self)(seq_concat_item( tasks, other), app=self._app) @@ -2004,6 +2008,13 @@ def __or__(self, other): sig = self.clone() sig.body = sig.body | other return sig + elif isinstance(other, group) and len(other.tasks) == 1: + # chord | group -> chain with chord body. + # unroll group with one member + other = maybe_unroll_group(other) + sig = self.clone() + sig.body = sig.body | other + return sig else: return super().__or__(other) diff --git a/t/integration/test_canvas.py b/t/integration/test_canvas.py index d3c3dc4c5f7..8f84c45df76 100644 --- a/t/integration/test_canvas.py +++ b/t/integration/test_canvas.py @@ -859,6 +859,154 @@ def before_task_publish_handler(sender=None, body=None, exchange=None, routing_k redis_connection = get_redis_connection() redis_connection.delete(redis_key) + def test_chaining_upgraded_chords_pure_groups(self, manager, subtests): + """ This test is built to reproduce the github issue https://github.com/celery/celery/issues/5958 + + The issue describes a canvas where a chain of groups are executed multiple times instead of once. + This test is built to reproduce the issue and to verify that the issue is fixed. 
+ """ + try: + manager.app.backend.ensure_chords_allowed() + except NotImplementedError as e: + raise pytest.skip(e.args[0]) + + if not manager.app.conf.result_backend.startswith('redis'): + raise pytest.skip('Requires redis result backend.') + + redis_connection = get_redis_connection() + redis_key = 'echo_chamber' + + c = chain( + # letting the chain upgrade the chord, reproduces the issue in _chord.__or__ + group( + redis_echo.si('1', redis_key=redis_key), + redis_echo.si('2', redis_key=redis_key), + redis_echo.si('3', redis_key=redis_key), + ), + group( + redis_echo.si('4', redis_key=redis_key), + redis_echo.si('5', redis_key=redis_key), + redis_echo.si('6', redis_key=redis_key), + ), + group( + redis_echo.si('7', redis_key=redis_key), + ), + group( + redis_echo.si('8', redis_key=redis_key), + ), + redis_echo.si('9', redis_key=redis_key), + redis_echo.si('Done', redis_key='Done'), + ) + + with subtests.test(msg='Run the chain and wait for completion'): + redis_connection.delete(redis_key, 'Done') + c.delay().get(timeout=TIMEOUT) + await_redis_list_message_length(1, redis_key='Done', timeout=10) + + with subtests.test(msg='All tasks are executed once'): + actual = [sig.decode('utf-8') for sig in redis_connection.lrange(redis_key, 0, -1)] + expected = [str(i) for i in range(1, 10)] + with subtests.test(msg='All tasks are executed once'): + assert sorted(actual) == sorted(expected) + + # Cleanup + redis_connection.delete(redis_key, 'Done') + + def test_chaining_upgraded_chords_starting_with_chord(self, manager, subtests): + """ This test is built to reproduce the github issue https://github.com/celery/celery/issues/5958 + + The issue describes a canvas where a chain of groups are executed multiple times instead of once. + This test is built to reproduce the issue and to verify that the issue is fixed. 
+ """ + try: + manager.app.backend.ensure_chords_allowed() + except NotImplementedError as e: + raise pytest.skip(e.args[0]) + + if not manager.app.conf.result_backend.startswith('redis'): + raise pytest.skip('Requires redis result backend.') + + redis_connection = get_redis_connection() + redis_key = 'echo_chamber' + + c = chain( + # by manually upgrading the chord to a group, we can reproduce the issue in _chain.__or__ + chord(group([redis_echo.si('1', redis_key=redis_key), + redis_echo.si('2', redis_key=redis_key), + redis_echo.si('3', redis_key=redis_key)]), + group([redis_echo.si('4', redis_key=redis_key), + redis_echo.si('5', redis_key=redis_key), + redis_echo.si('6', redis_key=redis_key)])), + group( + redis_echo.si('7', redis_key=redis_key), + ), + group( + redis_echo.si('8', redis_key=redis_key), + ), + redis_echo.si('9', redis_key=redis_key), + redis_echo.si('Done', redis_key='Done'), + ) + + with subtests.test(msg='Run the chain and wait for completion'): + redis_connection.delete(redis_key, 'Done') + c.delay().get(timeout=TIMEOUT) + await_redis_list_message_length(1, redis_key='Done', timeout=10) + + with subtests.test(msg='All tasks are executed once'): + actual = [sig.decode('utf-8') for sig in redis_connection.lrange(redis_key, 0, -1)] + expected = [str(i) for i in range(1, 10)] + with subtests.test(msg='All tasks are executed once'): + assert sorted(actual) == sorted(expected) + + # Cleanup + redis_connection.delete(redis_key, 'Done') + + def test_chaining_upgraded_chords_mixed_canvas(self, manager, subtests): + """ This test is built to reproduce the github issue https://github.com/celery/celery/issues/5958 + + The issue describes a canvas where a chain of groups are executed multiple times instead of once. + This test is built to reproduce the issue and to verify that the issue is fixed. 
+ """ + try: + manager.app.backend.ensure_chords_allowed() + except NotImplementedError as e: + raise pytest.skip(e.args[0]) + + if not manager.app.conf.result_backend.startswith('redis'): + raise pytest.skip('Requires redis result backend.') + + redis_connection = get_redis_connection() + redis_key = 'echo_chamber' + + c = chain( + chord(group([redis_echo.si('1', redis_key=redis_key), + redis_echo.si('2', redis_key=redis_key), + redis_echo.si('3', redis_key=redis_key)]), + group([redis_echo.si('4', redis_key=redis_key), + redis_echo.si('5', redis_key=redis_key), + redis_echo.si('6', redis_key=redis_key)])), + redis_echo.si('7', redis_key=redis_key), + group( + redis_echo.si('8', redis_key=redis_key), + ), + redis_echo.si('9', redis_key=redis_key), + redis_echo.si('Done', redis_key='Done'), + ) + + with subtests.test(msg='Run the chain and wait for completion'): + redis_connection.delete(redis_key, 'Done') + c.delay().get(timeout=TIMEOUT) + await_redis_list_message_length(1, redis_key='Done', timeout=10) + + with subtests.test(msg='All tasks are executed once'): + actual = [sig.decode('utf-8') for sig in redis_connection.lrange(redis_key, 0, -1)] + expected = [str(i) for i in range(1, 10)] + with subtests.test(msg='All tasks are executed once'): + assert sorted(actual) == sorted(expected) + + # Cleanup + redis_connection.delete(redis_key, 'Done') + class test_result_set: diff --git a/t/unit/tasks/test_canvas.py b/t/unit/tasks/test_canvas.py index cf294d6e624..97bc1807858 100644 --- a/t/unit/tasks/test_canvas.py +++ b/t/unit/tasks/test_canvas.py @@ -753,6 +753,22 @@ def test_chord_to_group(self): ['x0y0', 'x1y1', 'foo', 'z'] ] + def test_chain_of_chord__or__group_of_single_task(self): + c = chord([signature('header')], signature('body')) + c = chain(c) + g = group(signature('t')) + new_chain = c | g # g should be chained with the body of c[0] + assert isinstance(new_chain, _chain) + assert isinstance(new_chain.tasks[0].body, _chain) + + def test_chain_of_chord_upgrade_on_chaining(self): + c = chord([signature('header')], group(signature('body'))) + c = chain(c) + t = signature('t') + new_chain = c | t # t should be chained with the body of c[0] and create a new chord + assert isinstance(new_chain, _chain) + assert isinstance(new_chain.tasks[0].body, chord) + def test_apply_options(self): class static(Signature): @@ -2317,6 +2333,22 @@ def test_flag_allow_error_cb_on_chord_header_various_header_types(self): errback = c.link_error(sig) assert errback == sig + def test_chord__or__group_of_single_task(self): + """ Test chaining a chord to a group of a single task. 
""" + c = chord([signature('header')], signature('body')) + g = group(signature('t')) + stil_chord = c | g # g should be chained with the body of c + assert isinstance(stil_chord, chord) + assert isinstance(stil_chord.body, _chain) + + def test_chord_upgrade_on_chaining(self): + """ Test that chaining a chord with a group body upgrades to a new chord """ + c = chord([signature('header')], group(signature('body'))) + t = signature('t') + stil_chord = c | t # t should be chained with the body of c and create a new chord + assert isinstance(stil_chord, chord) + assert isinstance(stil_chord.body, chord) + class test_maybe_signature(CanvasCase): From c918a6dfeb6cbb840fe7865178b792731e6ca1ec Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 28 Nov 2022 21:43:18 +0200 Subject: [PATCH 0256/1051] Fixed bug in the stamping visitor mechanism where the request was lacking the stamps in the 'stamps' property (#7928) * Added integration test: test_task_received_has_access_to_stamps() * Fixed bug in Request.stamps property where the 'stamps' key wasn't used to access the stamps --- celery/worker/request.py | 2 +- t/integration/test_canvas.py | 29 ++++++++++++++++++++++++++++- 2 files changed, 29 insertions(+), 2 deletions(-) diff --git a/celery/worker/request.py b/celery/worker/request.py index b409bdc60da..ff8020a6f0f 100644 --- a/celery/worker/request.py +++ b/celery/worker/request.py @@ -327,7 +327,7 @@ def stamped_headers(self) -> list: @property def stamps(self) -> dict: - return {header: self._request_dict[header] for header in self.stamped_headers} + return {header: self._request_dict['stamps'][header] for header in self.stamped_headers} @property def correlation_id(self): diff --git a/t/integration/test_canvas.py b/t/integration/test_canvas.py index 8f84c45df76..ffb1de27687 100644 --- a/t/integration/test_canvas.py +++ b/t/integration/test_canvas.py @@ -13,7 +13,7 @@ from celery.canvas import StampingVisitor from celery.exceptions import ImproperlyConfigured, TimeoutError from celery.result import AsyncResult, GroupResult, ResultSet -from celery.signals import before_task_publish +from celery.signals import before_task_publish, task_received from . 
import tasks from .conftest import TEST_BACKEND, get_active_redis_channels, get_redis_connection @@ -3176,3 +3176,30 @@ def on_signature(self, sig, **headers) -> dict: assert 'stamped_headers' not in task_properties, 'stamped_headers key should not be in task properties' for stamp in stamped_headers: assert stamp not in task_properties, f'The stamp "{stamp}" should not be in the task properties' + + def test_task_received_has_access_to_stamps(self, manager): + """ Make sure that the request has the stamps using the task_received signal """ + + assertion_result = False + + @task_received.connect + def task_received_handler( + sender=None, + request=None, + signal=None, + **kwargs + ): + nonlocal assertion_result + assertion_result = all([ + stamped_header in request.stamps + for stamped_header in request.stamped_headers + ]) + + class CustomStampingVisitor(StampingVisitor): + def on_signature(self, sig, **headers) -> dict: + return {'stamp': 42} + + stamped_task = add.si(1, 1) + stamped_task.stamp(visitor=CustomStampingVisitor()) + stamped_task.apply_async().get() + assert assertion_result From 2960b8979fd8a5b1bc0b1917d2fd9dcdca047c71 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 29 Nov 2022 17:09:35 +0200 Subject: [PATCH 0257/1051] Fixed bug in task_accepted() where the request was not added to the `requests` but only to the `active_requests` (#7929) --- celery/worker/state.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/celery/worker/state.py b/celery/worker/state.py index 74b28d4397e..1c7ab3942fa 100644 --- a/celery/worker/state.py +++ b/celery/worker/state.py @@ -103,11 +103,13 @@ def task_reserved(request, def task_accepted(request, _all_total_count=None, + add_request=requests.__setitem__, add_active_request=active_requests.add, add_to_total_count=total_count.update): """Update global state when a task has been accepted.""" if not _all_total_count: _all_total_count = all_total_count + add_request(request.id, request) add_active_request(request) add_to_total_count({request.name: 1}) all_total_count[0] += 1 From cd3486d5f54e9fa7b3ac2d76432ce0b1400e476b Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 29 Nov 2022 18:23:17 +0200 Subject: [PATCH 0258/1051] Fix bug in TraceInfo._log_error() where the real exception obj was hiding behind 'ExceptionWithTraceback' (#7930) * Fix bug in TraceInfo._log_error() where the real exception obj was hiding behind 'ExceptionWithTraceback' * Commit 629bc63cb516031fdbe360b69de9b60fbe3a2034 introduced a bug in test_execute_jail_failure. 
This reverts the bug in the test, now that the real bug is fixed in the TraceInfo._log_error() method --- celery/app/trace.py | 4 +++- t/unit/worker/test_request.py | 2 +- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/celery/app/trace.py b/celery/app/trace.py index 5307620d342..37eb57ef591 100644 --- a/celery/app/trace.py +++ b/celery/app/trace.py @@ -10,7 +10,7 @@ from collections import namedtuple from warnings import warn -from billiard.einfo import ExceptionInfo +from billiard.einfo import ExceptionInfo, ExceptionWithTraceback from kombu.exceptions import EncodeError from kombu.serialization import loads as loads_message from kombu.serialization import prepare_accept_content @@ -238,6 +238,8 @@ def handle_failure(self, task, req, store_errors=True, call_errbacks=True): def _log_error(self, task, req, einfo): eobj = einfo.exception = get_pickled_exception(einfo.exception) + if isinstance(eobj, ExceptionWithTraceback): + eobj = einfo.exception = eobj.exc exception, traceback, exc_info, sargs, skwargs = ( safe_repr(eobj), safe_str(einfo.traceback), diff --git a/t/unit/worker/test_request.py b/t/unit/worker/test_request.py index ef312f44a51..bd63561f0cc 100644 --- a/t/unit/worker/test_request.py +++ b/t/unit/worker/test_request.py @@ -155,7 +155,7 @@ def test_execute_jail_failure(self): self.app, uuid(), self.mytask_raising.name, {}, [4], {}, ) assert isinstance(ret, ExceptionInfo) - assert ret.exception.exc.args == (4,) + assert ret.exception.args == (4,) def test_execute_task_ignore_result(self): @self.app.task(shared=False, ignore_result=True) From 788dfe4543175c7d7438f5b43b28906c6476b14d Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 29 Nov 2022 21:19:51 +0200 Subject: [PATCH 0259/1051] Added integration test: test_all_tasks_of_canvas_are_stamped() for validating stamping works with complex canvas on all tasks per the doc (#7931) --- t/integration/test_canvas.py | 45 ++++++++++++++++++++++++++++++++++++ 1 file changed, 45 insertions(+) diff --git a/t/integration/test_canvas.py b/t/integration/test_canvas.py index ffb1de27687..cc88050092a 100644 --- a/t/integration/test_canvas.py +++ b/t/integration/test_canvas.py @@ -3203,3 +3203,48 @@ def on_signature(self, sig, **headers) -> dict: stamped_task.stamp(visitor=CustomStampingVisitor()) stamped_task.apply_async().get() assert assertion_result + + def test_all_tasks_of_canvas_are_stamped(self, manager, subtests): + """ Test that complex canvas are stamped correctly """ + try: + manager.app.backend.ensure_chords_allowed() + except NotImplementedError as e: + raise pytest.skip(e.args[0]) + + @task_received.connect + def task_received_handler(**kwargs): + request = kwargs['request'] + nonlocal assertion_result + + assertion_result = all([ + assertion_result, + all([stamped_header in request.stamps for stamped_header in request.stamped_headers]), + request.stamps['stamp'] == 42 + ]) + + # Using a list because pytest.mark.parametrize does not play well + canvas = [ + add.s(1, 1), + group(add.s(1, 1), add.s(2, 2)), + chain(add.s(1, 1), add.s(2, 2)), + chord([add.s(1, 1), add.s(2, 2)], xsum.s()), + chain(group(add.s(0, 0)), add.s(-1)), + add.s(1, 1) | add.s(10), + group(add.s(1, 1) | add.s(10), add.s(2, 2) | add.s(20)), + chain(add.s(1, 1) | add.s(10), add.s(2) | add.s(20)), + chord([add.s(1, 1) | add.s(10), add.s(2, 2) | add.s(20)], xsum.s()), + chain(chain(add.s(1, 1) | add.s(10), add.s(2) | add.s(20)), add.s(3) | add.s(30)), + chord(group(chain(add.s(1, 1), add.s(2)), chord([add.s(3, 3), add.s(4, 4)], xsum.s())), 
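+                  # ^ the trailing xsum.s() below is the outer chord's body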
xsum.s()),
+        ]
+
+        for sig in canvas:
+            with subtests.test(msg='Assert all tasks are stamped'):
+                class CustomStampingVisitor(StampingVisitor):
+                    def on_signature(self, sig, **headers) -> dict:
+                        return {'stamp': 42}
+
+                stamped_task = sig
+                stamped_task.stamp(visitor=CustomStampingVisitor())
+                assertion_result = True
+                stamped_task.apply_async().get()
+                assert assertion_result

From 5c703572a1527a32b5644d21a25d37f488c78dbb Mon Sep 17 00:00:00 2001
From: Tomer Nosrati
Date: Wed, 30 Nov 2022 15:41:32 +0200
Subject: [PATCH 0260/1051] Added new example for the stamping mechanism:
 examples/stamping (#7933)

---
 examples/stamping/config.py   |  7 ++++
 examples/stamping/myapp.py    | 52 ++++++++++++++++++++++++
 examples/stamping/shell.py    | 75 +++++++++++++++++++++++++++++++++++
 examples/stamping/tasks.py    | 48 ++++++++++++++++++++++
 examples/stamping/visitors.py | 14 +++++++
 5 files changed, 196 insertions(+)
 create mode 100644 examples/stamping/config.py
 create mode 100644 examples/stamping/myapp.py
 create mode 100644 examples/stamping/shell.py
 create mode 100644 examples/stamping/tasks.py
 create mode 100644 examples/stamping/visitors.py

diff --git a/examples/stamping/config.py b/examples/stamping/config.py
new file mode 100644
index 00000000000..e3d8869ad9c
--- /dev/null
+++ b/examples/stamping/config.py
@@ -0,0 +1,7 @@
+from celery import Celery
+
+app = Celery(
+    'myapp',
+    broker='redis://',
+    backend='redis://',
+)
diff --git a/examples/stamping/myapp.py b/examples/stamping/myapp.py
new file mode 100644
index 00000000000..54d387e9f1d
--- /dev/null
+++ b/examples/stamping/myapp.py
@@ -0,0 +1,52 @@
+"""myapp.py
+
+This is a simple example of how to use the stamping feature.
+It uses a custom stamping visitor to stamp a workflow with a unique
+monitoring id stamp (per task), and a different visitor to stamp the last
+task in the workflow. The last task is stamped with a consistent stamp, which
+is used to revoke the task by its stamped header using two different approaches:
+1. Run the workflow, then revoke the last task by its stamped header.
+2. Revoke the last task by its stamped header before running the workflow.
+
+Usage::
+
+    # The worker service reacts to messages by executing tasks.
+    (window1)$ celery -A myapp worker -l INFO
+
+    # The shell service is used to run the example.
+    (window2)$ celery -A myapp shell
+
+    # Use (copy) the content of shell.py to run the workflow via the
+    # shell service.
+
+    # Use one of two demo runs via the shell service:
+    # 1) run_then_revoke(): Run the workflow and revoke the last task
+    #    by its stamped header during its run.
+    # 2) revoke_then_run(): Revoke the last task by its stamped header
+    #    before its run, then run the workflow.
+    #
+    # See worker logs for output as defined in task_received_handler(). 
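+    # Both demo runs build the same stamped canvas via prepare_workflow().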
+""" +import json + +# Import tasks in worker context +import tasks # noqa: F401 +from config import app + +from celery.signals import task_received + + +@task_received.connect +def task_received_handler( + sender=None, + request=None, + signal=None, + **kwargs +): + print(f'In {signal.name} for: {repr(request)}') + print(f'Found stamps: {request.stamped_headers}') + print(json.dumps(request.stamps, indent=4, sort_keys=True)) + + +if __name__ == '__main__': + app.start() diff --git a/examples/stamping/shell.py b/examples/stamping/shell.py new file mode 100644 index 00000000000..8cf1373d3bd --- /dev/null +++ b/examples/stamping/shell.py @@ -0,0 +1,75 @@ +from time import sleep + +from tasks import identity, mul, wait_for_revoke, xsum +from visitors import MonitoringIdStampingVisitor + +from celery.canvas import Signature, chain, chord, group +from celery.result import AsyncResult + + +def create_canvas(n: int) -> Signature: + """Creates a canvas to calculate: n * sum(1..n) * 10 + For example, if n = 3, the result is 3 * (1 + 2 + 3) * 10 = 180 + """ + canvas = chain( + group(identity.s(i) for i in range(1, n+1)) | xsum.s(), + chord(group(mul.s(10) for _ in range(1, n+1)), xsum.s()), + ) + + return canvas + + +def revoke_by_headers(result: AsyncResult, terminate: bool) -> None: + """Revokes the last task in the workflow by its stamped header + + Arguments: + result (AsyncResult): Can be either a frozen or a running result + terminate (bool): If True, the revoked task will be terminated + """ + result.revoke_by_stamped_headers({'mystamp': 'I am a stamp!'}, terminate=terminate) + + +def prepare_workflow() -> Signature: + """Creates a canvas that waits "n * sum(1..n) * 10" in seconds, + with n = 3. + + The canvas itself is stamped with a unique monitoring id stamp per task. + The waiting task is stamped with different consistent stamp, which is used + to revoke the task by its stamped header. + """ + canvas = create_canvas(n=3) + canvas.stamp(MonitoringIdStampingVisitor()) + canvas = canvas | wait_for_revoke.s() + return canvas + + +def run_then_revoke(): + """Runs the workflow and lets the waiting task run for a while. + Then, the waiting task is revoked by its stamped header. + + The expected outcome is that the canvas will be calculated to the end, + but the waiting task will be revoked and terminated *during its run*. + + See worker logs for more details. + """ + canvas = prepare_workflow() + result = canvas.delay() + print('Wait 5 seconds, then revoke the last task by its stamped header: "mystamp": "I am a stamp!"') + sleep(5) + print('Revoking the last task...') + revoke_by_headers(result, terminate=True) + + +def revoke_then_run(): + """Revokes the waiting task by its stamped header before it runs. + Then, run the workflow, which will not run the waiting task that was revoked. + + The expected outcome is that the canvas will be calculated to the end, + but the waiting task will not run at all. + + See worker logs for more details. 
+ """ + canvas = prepare_workflow() + result = canvas.freeze() + revoke_by_headers(result, terminate=False) + result = canvas.delay() diff --git a/examples/stamping/tasks.py b/examples/stamping/tasks.py new file mode 100644 index 00000000000..0cb3e113809 --- /dev/null +++ b/examples/stamping/tasks.py @@ -0,0 +1,48 @@ +from time import sleep + +from config import app + +from celery import Task +from examples.stamping.visitors import MyStampingVisitor + + +class MyTask(Task): + """Custom task for stamping on replace""" + + def on_replace(self, sig): + sig.stamp(MyStampingVisitor()) + return super().on_replace(sig) + + +@app.task +def identity(x): + """Identity function""" + return x + + +@app.task +def mul(x: int, y: int) -> int: + """Multiply two numbers""" + return x * y + + +@app.task +def xsum(numbers: list) -> int: + """Sum a list of numbers""" + return sum(numbers) + + +@app.task +def waitfor(seconds: int) -> None: + """Wait for "seconds" seconds, ticking every second.""" + print(f'Waiting for {seconds} seconds...') + for i in range(seconds): + sleep(1) + print(f'{i+1} seconds passed') + + +@app.task(bind=True, base=MyTask) +def wait_for_revoke(self: MyTask, seconds: int) -> None: + """Replace this task with a new task that waits for "seconds" seconds.""" + # This will stamp waitfor with MyStampingVisitor + self.replace(waitfor.s(seconds)) diff --git a/examples/stamping/visitors.py b/examples/stamping/visitors.py new file mode 100644 index 00000000000..0b7e462014f --- /dev/null +++ b/examples/stamping/visitors.py @@ -0,0 +1,14 @@ +from uuid import uuid4 + +from celery.canvas import StampingVisitor + + +class MyStampingVisitor(StampingVisitor): + def on_signature(self, sig, **headers) -> dict: + return {'mystamp': 'I am a stamp!'} + + +class MonitoringIdStampingVisitor(StampingVisitor): + + def on_signature(self, sig, **headers) -> dict: + return {'monitoring_id': str(uuid4())} From b2f456b8ea563f3e85af839a15d72e28907e9d09 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 1 Dec 2022 15:39:30 +0200 Subject: [PATCH 0261/1051] Fixed a bug where replacing a stamped task and stamping it again during the replacement, would remove the original stamp from the `stamps` key (stamped_headers key does have it, as it should). 
Tested with new integration test: test_replace_merge_stamps() --- celery/app/task.py | 14 +++++++---- examples/stamping/shell.py | 2 +- t/integration/tasks.py | 26 +++++++++++++++++++++ t/integration/test_canvas.py | 45 +++++++++++++++++++++++++++++++----- 4 files changed, 76 insertions(+), 11 deletions(-) diff --git a/celery/app/task.py b/celery/app/task.py index 22794fd16de..099f6290fca 100644 --- a/celery/app/task.py +++ b/celery/app/task.py @@ -8,7 +8,7 @@ from celery import current_app, states from celery._state import _task_stack -from celery.canvas import GroupStampingVisitor, _chain, group, signature +from celery.canvas import _chain, group, signature from celery.exceptions import Ignore, ImproperlyConfigured, MaxRetriesExceededError, Reject, Retry from celery.local import class_property from celery.result import EagerResult, denied_join_result @@ -953,10 +953,16 @@ def replace(self, sig): for t in reversed(self.request.chain or []): sig |= signature(t, app=self.app) # Stamping sig with parents groups - stamped_headers = self.request.stamped_headers if self.request.stamps: - groups = self.request.stamps.get("groups") - sig.stamp(visitor=GroupStampingVisitor(groups=groups, stamped_headers=stamped_headers)) + stamped_headers = self.request.stamped_headers.copy() + stamps = self.request.stamps.copy() + stamped_headers.extend(sig.options.get('stamped_headers', [])) + stamps.update({ + stamp: value + for stamp, value in sig.options.items() if stamp in sig.options.get('stamped_headers', []) + }) + sig.options['stamped_headers'] = stamped_headers + sig.options.update(stamps) return self.on_replace(sig) diff --git a/examples/stamping/shell.py b/examples/stamping/shell.py index 8cf1373d3bd..3d2b48bb1a3 100644 --- a/examples/stamping/shell.py +++ b/examples/stamping/shell.py @@ -38,8 +38,8 @@ def prepare_workflow() -> Signature: to revoke the task by its stamped header. """ canvas = create_canvas(n=3) - canvas.stamp(MonitoringIdStampingVisitor()) canvas = canvas | wait_for_revoke.s() + canvas.stamp(MonitoringIdStampingVisitor()) return canvas diff --git a/t/integration/tasks.py b/t/integration/tasks.py index 64f9512f4b6..00312d2c78a 100644 --- a/t/integration/tasks.py +++ b/t/integration/tasks.py @@ -2,6 +2,7 @@ from time import sleep from celery import Signature, Task, chain, chord, group, shared_task +from celery.canvas import StampingVisitor from celery.exceptions import SoftTimeLimitExceeded from celery.utils.log import get_task_logger @@ -421,3 +422,28 @@ def errback_old_style(request_id): def errback_new_style(request, exc, tb): redis_count(request.id) return request.id + + +class StampOnReplace(StampingVisitor): + stamp = {'StampOnReplace': 'This is the replaced task'} + + def on_signature(self, sig, **headers) -> dict: + return self.stamp + + +class StampedTaskOnReplace(Task): + """Custom task for stamping on replace""" + + def on_replace(self, sig): + sig.stamp(StampOnReplace()) + return super().on_replace(sig) + + +@shared_task +def replaced_with_me(): + return True + + +@shared_task(bind=True, base=StampedTaskOnReplace) +def replace_with_stamped_task(self: StampedTaskOnReplace): + self.replace(replaced_with_me.s()) diff --git a/t/integration/test_canvas.py b/t/integration/test_canvas.py index cc88050092a..af4c59d43ae 100644 --- a/t/integration/test_canvas.py +++ b/t/integration/test_canvas.py @@ -17,12 +17,13 @@ from . 
import tasks from .conftest import TEST_BACKEND, get_active_redis_channels, get_redis_connection -from .tasks import (ExpectedException, add, add_chord_to_chord, add_replaced, add_to_all, add_to_all_to_chord, - build_chain_inside_task, collect_ids, delayed_sum, delayed_sum_with_soft_guard, - errback_new_style, errback_old_style, fail, fail_replaced, identity, ids, print_unicode, - raise_error, redis_count, redis_echo, redis_echo_group_id, replace_with_chain, - replace_with_chain_which_raises, replace_with_empty_chain, retry_once, return_exception, - return_priority, second_order_replace1, tsum, write_to_file_and_return_int, xsum) +from .tasks import (ExpectedException, StampOnReplace, add, add_chord_to_chord, add_replaced, add_to_all, + add_to_all_to_chord, build_chain_inside_task, collect_ids, delayed_sum, + delayed_sum_with_soft_guard, errback_new_style, errback_old_style, fail, fail_replaced, identity, + ids, print_unicode, raise_error, redis_count, redis_echo, redis_echo_group_id, + replace_with_chain, replace_with_chain_which_raises, replace_with_empty_chain, + replace_with_stamped_task, retry_once, return_exception, return_priority, second_order_replace1, + tsum, write_to_file_and_return_int, xsum) RETRYABLE_EXCEPTIONS = (OSError, ConnectionError, TimeoutError) @@ -3248,3 +3249,35 @@ def on_signature(self, sig, **headers) -> dict: assertion_result = True stamped_task.apply_async().get() assert assertion_result + + def test_replace_merge_stamps(self, manager): + """ Test that replacing a task keeps the previous and new stamps """ + + @task_received.connect + def task_received_handler(**kwargs): + request = kwargs['request'] + nonlocal assertion_result + expected_stamp_key = list(StampOnReplace.stamp.keys())[0] + expected_stamp_value = list(StampOnReplace.stamp.values())[0] + + assertion_result = all([ + assertion_result, + all([stamped_header in request.stamps for stamped_header in request.stamped_headers]), + request.stamps['stamp'] == 42, + request.stamps[expected_stamp_key] == expected_stamp_value + if 'replaced_with_me' in request.task_name else True + ]) + + class CustomStampingVisitor(StampingVisitor): + def on_signature(self, sig, **headers) -> dict: + return {'stamp': 42} + + stamped_task = replace_with_stamped_task.s() + stamped_task.stamp(visitor=CustomStampingVisitor()) + assertion_result = False + stamped_task.delay() + assertion_result = True + sleep(1) + # stamped_task needs to be stamped with CustomStampingVisitor + # and the replaced task with both CustomStampingVisitor and StampOnReplace + assert assertion_result, 'All of the tasks should have been stamped' From 5eaa6acc74567523ca5adcf9d1e5177ace70e064 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 1 Dec 2022 22:38:39 +0200 Subject: [PATCH 0262/1051] The bugfix in PR #7934 created a new bug with nested group stamping on task replace. (#7935) This adds a new test case to reproduce it + fix. 
New test case: test_replace_group_merge_stamps() --- celery/app/task.py | 4 +++- t/integration/tasks.py | 8 +++++--- t/integration/test_canvas.py | 28 ++++++++++++++++++++++++++++ 3 files changed, 36 insertions(+), 4 deletions(-) diff --git a/celery/app/task.py b/celery/app/task.py index 099f6290fca..c2d9784da33 100644 --- a/celery/app/task.py +++ b/celery/app/task.py @@ -8,7 +8,7 @@ from celery import current_app, states from celery._state import _task_stack -from celery.canvas import _chain, group, signature +from celery.canvas import GroupStampingVisitor, _chain, group, signature from celery.exceptions import Ignore, ImproperlyConfigured, MaxRetriesExceededError, Reject, Retry from celery.local import class_property from celery.result import EagerResult, denied_join_result @@ -954,6 +954,8 @@ def replace(self, sig): sig |= signature(t, app=self.app) # Stamping sig with parents groups if self.request.stamps: + groups = self.request.stamps.get("groups") + sig.stamp(visitor=GroupStampingVisitor(groups=groups, stamped_headers=self.request.stamped_headers)) stamped_headers = self.request.stamped_headers.copy() stamps = self.request.stamps.copy() stamped_headers.extend(sig.options.get('stamped_headers', [])) diff --git a/t/integration/tasks.py b/t/integration/tasks.py index 00312d2c78a..d551f06768d 100644 --- a/t/integration/tasks.py +++ b/t/integration/tasks.py @@ -2,7 +2,7 @@ from time import sleep from celery import Signature, Task, chain, chord, group, shared_task -from celery.canvas import StampingVisitor +from celery.canvas import StampingVisitor, signature from celery.exceptions import SoftTimeLimitExceeded from celery.utils.log import get_task_logger @@ -445,5 +445,7 @@ def replaced_with_me(): @shared_task(bind=True, base=StampedTaskOnReplace) -def replace_with_stamped_task(self: StampedTaskOnReplace): - self.replace(replaced_with_me.s()) +def replace_with_stamped_task(self: StampedTaskOnReplace, replace_with=None): + if replace_with is None: + replace_with = replaced_with_me.s() + self.replace(signature(replace_with)) diff --git a/t/integration/test_canvas.py b/t/integration/test_canvas.py index af4c59d43ae..1cb683b3d5e 100644 --- a/t/integration/test_canvas.py +++ b/t/integration/test_canvas.py @@ -3281,3 +3281,31 @@ def on_signature(self, sig, **headers) -> dict: # stamped_task needs to be stamped with CustomStampingVisitor # and the replaced task with both CustomStampingVisitor and StampOnReplace assert assertion_result, 'All of the tasks should have been stamped' + + def test_replace_group_merge_stamps(self, manager): + """ Test that replacing a group signature keeps the previous and new group stamps """ + + x = 5 + y = 6 + + @task_received.connect + def task_received_handler(**kwargs): + request = kwargs['request'] + nonlocal assertion_result + nonlocal gid1 + + assertion_result = all([ + assertion_result, + request.stamps['groups'][0] == gid1, + len(request.stamps['groups']) == 2 + if any([request.args == [10, x], request.args == [10, y]]) else True + ]) + + sig = add.s(3, 3) | add.s(4) | group(add.s(x), add.s(y)) + sig = group(add.s(1, 1), add.s(2, 2), replace_with_stamped_task.s(replace_with=sig)) + assertion_result = False + sig.delay() + assertion_result = True + gid1 = sig.options['task_id'] + sleep(1) + assert assertion_result, 'Group stamping is corrupted' From aad5ff1c2e67160df7f09bbe3f38188f0cf2dfbd Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sat, 3 Dec 2022 18:17:23 +0200 Subject: [PATCH 0263/1051] Added test_stamping_example_canvas to validate the new stamping 
example canvas is calculated correctly using automatic tests --- t/integration/tasks.py | 6 ++++++ t/integration/test_canvas.py | 17 ++++++++++++++++- 2 files changed, 22 insertions(+), 1 deletion(-) diff --git a/t/integration/tasks.py b/t/integration/tasks.py index d551f06768d..dac9455c38e 100644 --- a/t/integration/tasks.py +++ b/t/integration/tasks.py @@ -26,6 +26,12 @@ def add(x, y, z=None): return x + y +@shared_task +def mul(x: int, y: int) -> int: + """Multiply two numbers""" + return x * y + + @shared_task def write_to_file_and_return_int(file_name, i): with open(file_name, mode='a', buffering=1) as file_handle: diff --git a/t/integration/test_canvas.py b/t/integration/test_canvas.py index 1cb683b3d5e..d5b852fb0bb 100644 --- a/t/integration/test_canvas.py +++ b/t/integration/test_canvas.py @@ -20,7 +20,7 @@ from .tasks import (ExpectedException, StampOnReplace, add, add_chord_to_chord, add_replaced, add_to_all, add_to_all_to_chord, build_chain_inside_task, collect_ids, delayed_sum, delayed_sum_with_soft_guard, errback_new_style, errback_old_style, fail, fail_replaced, identity, - ids, print_unicode, raise_error, redis_count, redis_echo, redis_echo_group_id, + ids, mul, print_unicode, raise_error, redis_count, redis_echo, redis_echo_group_id, replace_with_chain, replace_with_chain_which_raises, replace_with_empty_chain, replace_with_stamped_task, retry_once, return_exception, return_priority, second_order_replace1, tsum, write_to_file_and_return_int, xsum) @@ -506,6 +506,21 @@ def test_chain_of_a_chord_and_three_tasks_and_a_group(self, manager): res = c() assert res.get(timeout=TIMEOUT) == [8, 8] + def test_stamping_example_canvas(self, manager): + """Test the stamping example canvas from the examples directory""" + try: + manager.app.backend.ensure_chords_allowed() + except NotImplementedError as e: + raise pytest.skip(e.args[0]) + + c = chain( + group(identity.s(i) for i in range(1, 4)) | xsum.s(), + chord(group(mul.s(10) for _ in range(1, 4)), xsum.s()), + ) + + res = c() + assert res.get(timeout=TIMEOUT) == 180 + @pytest.mark.xfail(raises=TimeoutError, reason="Task is timeout") def test_nested_chain_group_lone(self, manager): """ From 49334bdd5c081d274992b92efbfe2056c30d5edd Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sun, 4 Dec 2022 17:37:08 +0200 Subject: [PATCH 0264/1051] Fixed a bug in losing chain links (not error links though) when unchaining a chain --- celery/canvas.py | 9 ++++++--- t/integration/test_canvas.py | 23 +++++++++++++++++++++++ t/unit/tasks/test_canvas.py | 24 ++++++++++++++++++++++++ 3 files changed, 53 insertions(+), 3 deletions(-) diff --git a/celery/canvas.py b/celery/canvas.py index 837364145a0..a39f9e92390 100644 --- a/celery/canvas.py +++ b/celery/canvas.py @@ -960,14 +960,17 @@ def clone(self, *args, **kwargs): def unchain_tasks(self): """Return a list of tasks in the chain. - The tasks list would be cloned from the chain's tasks, - and all of the tasks would be linked to the same error callback + The tasks list would be cloned from the chain's tasks. + All of the chain callbacks would be added to the last task in the (cloned) chain. + All of the tasks would be linked to the same error callback as the chain itself, to ensure that the correct error callback is called if any of the (cloned) tasks of the chain fail. """ # Clone chain's tasks assigning signatures from link_error - # to each task + # to each task and adding the chain's links to the last task. 
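+        # (Success callbacks can only fire from the last task, whereas any
+        # task in the chain can fail, so errbacks go on every clone.)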
tasks = [t.clone() for t in self.tasks] + for sig in self.options.get('link', []): + tasks[-1].link(sig) for sig in self.options.get('link_error', []): for task in tasks: task.link_error(sig) diff --git a/t/integration/test_canvas.py b/t/integration/test_canvas.py index d5b852fb0bb..1544f88dd40 100644 --- a/t/integration/test_canvas.py +++ b/t/integration/test_canvas.py @@ -618,6 +618,29 @@ def test_chain_with_cb_replaced_with_chain_with_cb(self, manager): assert res.get(timeout=TIMEOUT) == 'Hello world' await_redis_echo({link_msg, 'Hello world'}) + def test_chain_flattening_keep_links_of_inner_chain(self, manager): + if not manager.app.conf.result_backend.startswith('redis'): + raise pytest.skip('Requires redis result backend.') + + redis_connection = get_redis_connection() + + link_b_msg = 'link_b called' + link_b_key = 'echo_link_b' + link_b_sig = redis_echo.si(link_b_msg, redis_key=link_b_key) + + def link_chain(sig): + sig.link(link_b_sig) + sig.link_error(identity.s('link_ab')) + return sig + + inner_chain = link_chain(chain(identity.s('a'), add.s('b'))) + flat_chain = chain(inner_chain, add.s('c')) + redis_connection.delete(link_b_key) + res = flat_chain.delay() + + assert res.get(timeout=TIMEOUT) == 'abc' + await_redis_echo((link_b_msg,), redis_key=link_b_key) + def test_chain_with_eb_replaced_with_chain_with_eb( self, manager, subtests ): diff --git a/t/unit/tasks/test_canvas.py b/t/unit/tasks/test_canvas.py index 97bc1807858..4a9bcb48c45 100644 --- a/t/unit/tasks/test_canvas.py +++ b/t/unit/tasks/test_canvas.py @@ -967,6 +967,30 @@ def test_chain_single_child_group_result(self): mock_apply.assert_called_once_with(chain=[]) assert res is mock_apply.return_value + def test_chain_flattening_keep_links_of_inner_chain(self): + def link_chain(sig): + sig.link(signature('link_b')) + sig.link_error(signature('link_ab')) + return sig + + inner_chain = link_chain(chain(signature('a'), signature('b'))) + assert inner_chain.options['link'][0] == signature('link_b') + assert inner_chain.options['link_error'][0] == signature('link_ab') + assert inner_chain.tasks[0] == signature('a') + assert inner_chain.tasks[0].options == {} + assert inner_chain.tasks[1] == signature('b') + assert inner_chain.tasks[1].options == {} + + flat_chain = chain(inner_chain, signature('c')) + assert flat_chain.options == {} + assert flat_chain.tasks[0].name == 'a' + assert 'link' not in flat_chain.tasks[0].options + assert signature(flat_chain.tasks[0].options['link_error'][0]) == signature('link_ab') + assert flat_chain.tasks[1].name == 'b' + assert 'link' in flat_chain.tasks[1].options, "b is missing the link from inner_chain.options['link'][0]" + assert signature(flat_chain.tasks[1].options['link'][0]) == signature('link_b') + assert signature(flat_chain.tasks[1].options['link_error'][0]) == signature('link_ab') + class test_group(CanvasCase): def test_group_stamping_one_level(self, subtests): From 8b7e9f57ebdf63e4c0ae3644923affa1625e6913 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Tue, 8 Nov 2022 09:59:07 +0600 Subject: [PATCH 0265/1051] Removing as not mandatory --- .../workflows/post_release_to_hacker_news.yml | 18 ------------------ 1 file changed, 18 deletions(-) delete mode 100644 .github/workflows/post_release_to_hacker_news.yml diff --git a/.github/workflows/post_release_to_hacker_news.yml b/.github/workflows/post_release_to_hacker_news.yml deleted file mode 100644 index c21287558bd..00000000000 --- a/.github/workflows/post_release_to_hacker_news.yml +++ /dev/null @@ -1,18 +0,0 @@ -on: - release: 
- types: [released] - -permissions: {} -jobs: - post_release_to_hacker_news: - runs-on: ubuntu-latest - name: Post Release to Hacker News - steps: - - name: Post the Release - uses: MicahLyle/github-action-post-to-hacker-news@v1 - env: - HN_USERNAME: ${{ secrets.HN_USERNAME }} - HN_PASSWORD: ${{ secrets.HN_PASSWORD }} - HN_TITLE_FORMAT_SPECIFIER: Celery v%s Released! - HN_URL_FORMAT_SPECIFIER: https://docs.celeryq.dev/en/v%s/changelog.html - HN_TEST_MODE: true From 3983484defb4564c9baf2a24a6b3af2d0b3c0df7 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sun, 11 Dec 2022 09:32:04 +0200 Subject: [PATCH 0266/1051] Housekeeping for Canvas.py (#7942) * Removed pass from @abstractmethod StampingVisitor.on_signature() * Added unit test: test_repr_empty_group() * Added unit test: test_signature_on_error_adds_error_callback() * Cleaned chord.link_error() implementation * Added a new test suite: test_merge_dictionaries * Fixed bug in _merge_dictionaries() function when using None values, tested with test_none_values() * Added test case for "Signature | non-Signature" in unit test: test_OR() * Added new unit test: test_freezing_args_set_in_options() * Added new unit test: test_group_prepared(), for the inner method of group._prepared() * Added unit test for chord: test_link_error_on_chord_header(), using the task_allow_error_cb_on_chord_header flag * Added subtests explanation to test_OR() unit test for "sig | non-sig" test case * Added unit test: test_on_signature_gets_the_signature() * Matched (copied) the unit tests "Install tox" step to the integration tests to have the same command for both --- .github/workflows/python-package.yml | 2 +- celery/canvas.py | 16 +-- t/unit/tasks/test_canvas.py | 144 ++++++++++++++++++++++++++- 3 files changed, 150 insertions(+), 12 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index df76966793a..52c1438a9c3 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -120,7 +120,7 @@ jobs: run: | echo "::set-output name=dir::$(pip cache dir)" - name: Install tox - run: python -m pip install tox + run: python -m pip install --upgrade pip tox tox-gh-actions - name: > Run tox for "${{ matrix.python-version }}-integration-${{ matrix.toxenv }}" diff --git a/celery/canvas.py b/celery/canvas.py index a39f9e92390..aadd39003f5 100644 --- a/celery/canvas.py +++ b/celery/canvas.py @@ -92,7 +92,7 @@ def _merge_dictionaries(d1, d2): else: if isinstance(value, (int, float, str)): d1[key] = [value] - if isinstance(d2[key], list): + if isinstance(d2[key], list) and d1[key] is not None: d1[key].extend(d2[key]) else: if d1[key] is None: @@ -161,7 +161,6 @@ def on_signature(self, sig, **headers) -> dict: Returns: Dict: headers to update. """ - pass def on_chord_header_start(self, chord, **header) -> dict: """Method that is called on сhord header stamping start. @@ -2248,13 +2247,14 @@ def link_error(self, errback): applied to the body. """ if self.app.conf.task_allow_error_cb_on_chord_header: - # self.tasks can be a list of the chord header workflow. - if isinstance(self.tasks, (list, tuple)): - for task in self.tasks: - task.link_error(errback) - else: - self.tasks.link_error(errback) + for task in self.tasks: + task.link_error(errback) else: + # Once this warning is removed, the whole method needs to be refactored to: + # 1. link the error callback to each task in the header + # 2. link the error callback to the body + # 3. 
return the error callback + # In summary, up to 4 lines of code + updating the method docstring. warnings.warn( "task_allow_error_cb_on_chord_header=False is pending deprecation in " "a future release of Celery.\n" diff --git a/t/unit/tasks/test_canvas.py b/t/unit/tasks/test_canvas.py index 4a9bcb48c45..63966b2dadf 100644 --- a/t/unit/tasks/test_canvas.py +++ b/t/unit/tasks/test_canvas.py @@ -7,8 +7,9 @@ from celery import Task from celery._state import _task_stack -from celery.canvas import (GroupStampingVisitor, Signature, StampingVisitor, _chain, _maybe_group, chain, chord, - chunks, group, maybe_signature, maybe_unroll_group, signature, xmap, xstarmap) +from celery.canvas import (GroupStampingVisitor, Signature, StampingVisitor, _chain, _maybe_group, + _merge_dictionaries, chain, chord, chunks, group, maybe_signature, maybe_unroll_group, + signature, xmap, xstarmap) from celery.exceptions import Ignore from celery.result import AsyncResult, EagerResult, GroupResult @@ -137,6 +138,20 @@ def __init__(self, *args, **kwargs): class test_Signature(CanvasCase): + @pytest.mark.usefixtures('depends_on_current_app') + def test_on_signature_gets_the_signature(self): + expected_sig = self.add.s(4, 2) + + class CustomStampingVisitor(StampingVisitor): + def on_signature(self, actual_sig, **headers) -> dict: + nonlocal expected_sig + assert actual_sig == expected_sig + return {'header': 'value'} + + sig = expected_sig.clone() + sig.stamp(CustomStampingVisitor()) + assert sig.options['header'] == 'value' + def test_double_stamping(self, subtests): """ Test manual signature stamping with two different stamps. @@ -440,7 +455,7 @@ def test_flatten_links(self): tasks[1].link(tasks[2]) assert tasks[0].flatten_links() == tasks - def test_OR(self): + def test_OR(self, subtests): x = self.add.s(2, 2) | self.mul.s(4) assert isinstance(x, _chain) y = self.add.s(4, 4) | self.div.s(2) @@ -454,6 +469,10 @@ def test_OR(self): assert isinstance(ax, _chain) assert len(ax.tasks), 3 == 'consolidates chain to chain' + with subtests.test('Test chaining with a non-signature object'): + with pytest.raises(TypeError): + assert signature('foo') | None + def test_INVERT(self): x = self.add.s(2, 2) x.apply_async = Mock() @@ -563,6 +582,32 @@ def test_keeping_link_error_on_chaining(self): assert SIG in x.options['link_error'] assert not x.tasks[0].options.get('link_error') + def test_signature_on_error_adds_error_callback(self): + sig = signature('sig').on_error(signature('on_error')) + assert sig.options['link_error'] == [signature('on_error')] + + @pytest.mark.parametrize('_id, group_id, chord, root_id, parent_id, group_index', [ + ('_id', 'group_id', 'chord', 'root_id', 'parent_id', 1), + ]) + def test_freezing_args_set_in_options(self, _id, group_id, chord, root_id, parent_id, group_index): + sig = self.add.s(1, 1) + sig.freeze( + _id=_id, + group_id=group_id, + chord=chord, + root_id=root_id, + parent_id=parent_id, + group_index=group_index, + ) + options = sig.options + + assert options['task_id'] == _id + assert options['group_id'] == group_id + assert options['chord'] == chord + assert options['root_id'] == root_id + assert options['parent_id'] == parent_id + assert options['group_index'] == group_index + class test_xmap_xstarmap(CanvasCase): @@ -1318,6 +1363,10 @@ def test_repr(self): x = group([self.add.s(2, 2), self.add.s(4, 4)]) assert repr(x) + def test_repr_empty_group(self): + x = group([]) + assert repr(x) == 'group()' + def test_reverse(self): x = group([self.add.s(2, 2), self.add.s(4, 4)]) assert 
isinstance(signature(x), group) @@ -1701,6 +1750,19 @@ def test_apply_contains_chords_containing_empty_chord(self): # the encapsulated chains - in this case 1 for each child chord mock_set_chord_size.assert_has_calls((call(ANY, 1),) * child_count) + def test_group_prepared(self): + # Using both partial and dict based signatures + sig = group(dict(self.add.s(0)), self.add.s(0)) + _, group_id, root_id = sig._freeze_gid({}) + tasks = sig._prepared(sig.tasks, [42], group_id, root_id, self.app) + + for task, result, group_id in tasks: + assert isinstance(task, Signature) + assert task.args[0] == 42 + assert task.args[1] == 0 + assert isinstance(result, AsyncResult) + assert group_id is not None + class test_chord(CanvasCase): def test_chord_stamping_one_level(self, subtests): @@ -2373,6 +2435,22 @@ def test_chord_upgrade_on_chaining(self): assert isinstance(stil_chord, chord) assert isinstance(stil_chord.body, chord) + @pytest.mark.parametrize('header', [ + [signature('s1'), signature('s2')], + group(signature('s1'), signature('s2')) + ]) + @pytest.mark.usefixtures('depends_on_current_app') + def test_link_error_on_chord_header(self, header): + """ Test that link_error on a chord also links the header """ + self.app.conf.task_allow_error_cb_on_chord_header = True + c = chord(header, signature('body')) + err = signature('err') + errback = c.link_error(err) + assert errback == err + for header_task in c.tasks: + assert header_task.options['link_error'] == [err] + assert c.body.options['link_error'] == [err] + class test_maybe_signature(CanvasCase): @@ -2386,3 +2464,63 @@ def test_is_dict(self): def test_when_sig(self): s = self.add.s() assert maybe_signature(s, app=self.app) is s + + +class test_merge_dictionaries(CanvasCase): + + def test_docstring_example(self): + d1 = {'dict': {'a': 1}, 'list': [1, 2], 'tuple': (1, 2)} + d2 = {'dict': {'b': 2}, 'list': [3, 4], 'set': {'a', 'b'}} + _merge_dictionaries(d1, d2) + assert d1 == { + 'dict': {'a': 1, 'b': 2}, + 'list': [1, 2, 3, 4], + 'tuple': (1, 2), + 'set': {'a', 'b'} + } + + @pytest.mark.parametrize('d1,d2,expected_result', [ + ( + {'None': None}, + {'None': None}, + {'None': [None]} + ), + ( + {'None': None}, + {'None': [None]}, + {'None': [[None]]} + ), + ( + {'None': None}, + {'None': 'Not None'}, + {'None': ['Not None']} + ), + ( + {'None': None}, + {'None': ['Not None']}, + {'None': [['Not None']]} + ), + ( + {'None': [None]}, + {'None': None}, + {'None': [None, None]} + ), + ( + {'None': [None]}, + {'None': [None]}, + {'None': [None, None]} + ), + ( + {'None': [None]}, + {'None': 'Not None'}, + {'None': [None, 'Not None']} + ), + ( + {'None': [None]}, + {'None': ['Not None']}, + {'None': [None, 'Not None']} + ), + ]) + def test_none_values(self, d1, d2, expected_result): + _merge_dictionaries(d1, d2) + assert d1 == expected_result From ae73d5d777feefb4044bc37bbe618cad242202f8 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 13 Dec 2022 17:23:54 +0600 Subject: [PATCH 0267/1051] [pre-commit.ci] pre-commit autoupdate (#7927) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * [pre-commit.ci] pre-commit autoupdate updates: - [github.com/asottile/pyupgrade: v3.2.2 → v3.3.1](https://github.com/asottile/pyupgrade/compare/v3.2.2...v3.3.1) - [github.com/PyCQA/flake8: 5.0.4 → 6.0.0](https://github.com/PyCQA/flake8/compare/5.0.4...6.0.0) - [github.com/pre-commit/pre-commit-hooks: v4.3.0 → 
v4.4.0](https://github.com/pre-commit/pre-commit-hooks/compare/v4.3.0...v4.4.0) * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 6 +++--- celery/__init__.py | 3 +-- celery/app/base.py | 3 +-- celery/backends/base.py | 2 +- celery/bin/shell.py | 4 ++-- celery/concurrency/__init__.py | 2 +- celery/concurrency/asynpool.py | 2 +- celery/contrib/testing/app.py | 2 +- celery/security/__init__.py | 2 +- celery/worker/consumer/consumer.py | 2 +- examples/celery_http_gateway/urls.py | 2 +- examples/django/proj/urls.py | 2 +- examples/stamping/myapp.py | 2 +- t/integration/test_canvas.py | 2 +- t/unit/app/test_beat.py | 4 ++-- t/unit/backends/test_database.py | 8 ++++---- t/unit/bin/proj/app2.py | 2 +- t/unit/concurrency/test_eventlet.py | 6 +++--- t/unit/contrib/proj/foo.py | 2 +- t/unit/contrib/test_sphinx.py | 2 +- t/unit/contrib/test_worker.py | 2 +- t/unit/tasks/test_canvas.py | 2 +- t/unit/utils/test_functional.py | 2 +- 23 files changed, 32 insertions(+), 34 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 279949078f8..65933ac32b1 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,12 +1,12 @@ repos: - repo: https://github.com/asottile/pyupgrade - rev: v3.2.2 + rev: v3.3.1 hooks: - id: pyupgrade args: ["--py37-plus"] - repo: https://github.com/PyCQA/flake8 - rev: 5.0.4 + rev: 6.0.0 hooks: - id: flake8 @@ -16,7 +16,7 @@ repos: - id: yesqa - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.3.0 + rev: v4.4.0 hooks: - id: check-merge-conflict - id: check-toml diff --git a/celery/__init__.py b/celery/__init__.py index 7c2de763898..aa64b596f0a 100644 --- a/celery/__init__.py +++ b/celery/__init__.py @@ -70,8 +70,7 @@ def debug_import(name, locals=None, globals=None, from celery.app.base import Celery from celery.app.task import Task from celery.app.utils import bugreport - from celery.canvas import (chain, chord, chunks, group, maybe_signature, signature, subtask, xmap, # noqa - xstarmap) + from celery.canvas import chain, chord, chunks, group, maybe_signature, signature, subtask, xmap, xstarmap from celery.utils import uuid # Eventlet/gevent patching must happen before importing diff --git a/celery/app/base.py b/celery/app/base.py index d400cd1c000..d691dcbee61 100644 --- a/celery/app/base.py +++ b/celery/app/base.py @@ -33,8 +33,7 @@ from celery.utils.time import maybe_make_aware, timezone, to_utc # Load all builtin tasks -from . import builtins # noqa -from . import backends +from . 
import backends, builtins from .annotations import prepare as prepare_annotations from .autoretry import add_autoretry_behaviour from .defaults import DEFAULT_SECURITY_DIGEST, find_deprecated_settings diff --git a/celery/backends/base.py b/celery/backends/base.py index 22710cb3c56..a8bf01a5929 100644 --- a/celery/backends/base.py +++ b/celery/backends/base.py @@ -397,7 +397,7 @@ def exception_to_python(self, exc): exc = cls(*exc_msg) else: exc = cls(exc_msg) - except Exception as err: # noqa + except Exception as err: exc = Exception(f'{cls}({exc_msg})') return exc diff --git a/celery/bin/shell.py b/celery/bin/shell.py index 77b14d8a307..840bcc3c52f 100644 --- a/celery/bin/shell.py +++ b/celery/bin/shell.py @@ -67,10 +67,10 @@ def _no_ipython(self): # pragma: no cover def _invoke_default_shell(locals): try: - import IPython # noqa + import IPython except ImportError: try: - import bpython # noqa + import bpython except ImportError: _invoke_fallback_shell(locals) else: diff --git a/celery/concurrency/__init__.py b/celery/concurrency/__init__.py index 5fd0d9cad42..54eabfa2543 100644 --- a/celery/concurrency/__init__.py +++ b/celery/concurrency/__init__.py @@ -17,7 +17,7 @@ } try: - import concurrent.futures # noqa: F401 + import concurrent.futures except ImportError: pass else: diff --git a/celery/concurrency/asynpool.py b/celery/concurrency/asynpool.py index 19715005828..b735e7b1014 100644 --- a/celery/concurrency/asynpool.py +++ b/celery/concurrency/asynpool.py @@ -57,7 +57,7 @@ def __read__(fd, buf, size, read=os.read): return n readcanbuf = False - def unpack_from(fmt, iobuf, unpack=unpack): # noqa + def unpack_from(fmt, iobuf, unpack=unpack): return unpack(fmt, iobuf.getvalue()) # <-- BytesIO __all__ = ('AsynPool',) diff --git a/celery/contrib/testing/app.py b/celery/contrib/testing/app.py index 95ed700b8ec..b8bd9f0d77a 100644 --- a/celery/contrib/testing/app.py +++ b/celery/contrib/testing/app.py @@ -47,7 +47,7 @@ def __init__(self, *args, **kwargs): def TestApp(name=None, config=None, enable_logging=False, set_as_current=False, log=UnitLogging, backend=None, broker=None, **kwargs): """App used for testing.""" - from . import tasks # noqa + from . import tasks config = dict(deepcopy(DEFAULT_TEST_CONFIG), **config or {}) if broker is not None: config.pop('broker_url', None) diff --git a/celery/security/__init__.py b/celery/security/__init__.py index c801d98b1df..cea3c2ff78f 100644 --- a/celery/security/__init__.py +++ b/celery/security/__init__.py @@ -36,7 +36,7 @@ __all__ = ('setup_security',) try: - import cryptography # noqa + import cryptography except ImportError: raise ImproperlyConfigured(CRYPTOGRAPHY_NOT_INSTALLED) diff --git a/celery/worker/consumer/consumer.py b/celery/worker/consumer/consumer.py index 6dd93ba7e57..5dec908da67 100644 --- a/celery/worker/consumer/consumer.py +++ b/celery/worker/consumer/consumer.py @@ -124,7 +124,7 @@ These tasks cannot be acknowledged as the connection is gone, and the tasks are automatically redelivered back to the queue. You can enable this behavior using the worker_cancel_long_running_tasks_on_connection_loss setting. In Celery 5.1 it is set to False by default. The setting will be set to True by default in Celery 6.0. 
-""" # noqa: E501 +""" def dump_body(m, body): diff --git a/examples/celery_http_gateway/urls.py b/examples/celery_http_gateway/urls.py index 802ff2344b2..7b74284c137 100644 --- a/examples/celery_http_gateway/urls.py +++ b/examples/celery_http_gateway/urls.py @@ -1,5 +1,5 @@ from celery_http_gateway.tasks import hello_world -from django.conf.urls.defaults import handler404, handler500, include, patterns, url # noqa +from django.conf.urls.defaults import handler404, handler500, include, patterns, url from djcelery import views as celery_views # Uncomment the next two lines to enable the admin: diff --git a/examples/django/proj/urls.py b/examples/django/proj/urls.py index 5f67c27b660..74415c35830 100644 --- a/examples/django/proj/urls.py +++ b/examples/django/proj/urls.py @@ -1,4 +1,4 @@ -from django.urls import handler404, handler500, include, url # noqa +from django.urls import handler404, handler500, include, url # Uncomment the next two lines to enable the admin: # from django.contrib import admin diff --git a/examples/stamping/myapp.py b/examples/stamping/myapp.py index 54d387e9f1d..92e68b2cb45 100644 --- a/examples/stamping/myapp.py +++ b/examples/stamping/myapp.py @@ -30,7 +30,7 @@ import json # Import tasks in worker context -import tasks # noqa: F401 +import tasks from config import app from celery.signals import task_received diff --git a/t/integration/test_canvas.py b/t/integration/test_canvas.py index 1544f88dd40..47150bfb79e 100644 --- a/t/integration/test_canvas.py +++ b/t/integration/test_canvas.py @@ -6,7 +6,7 @@ from time import monotonic, sleep import pytest -import pytest_subtests # noqa: F401 +import pytest_subtests from celery import chain, chord, group, signature from celery.backends.base import BaseKeyValueStoreBackend diff --git a/t/unit/app/test_beat.py b/t/unit/app/test_beat.py index 445aa28ed86..94fdb0b464f 100644 --- a/t/unit/app/test_beat.py +++ b/t/unit/app/test_beat.py @@ -99,9 +99,9 @@ def test_lt(self): e1 = self.create_entry(schedule=timedelta(seconds=10)) e2 = self.create_entry(schedule=timedelta(seconds=2)) # order doesn't matter, see comment in __lt__ - res1 = e1 < e2 # noqa + res1 = e1 < e2 try: - res2 = e1 < object() # noqa + res2 = e1 < object() except TypeError: pass diff --git a/t/unit/backends/test_database.py b/t/unit/backends/test_database.py index 511298f9a1b..d6b03145056 100644 --- a/t/unit/backends/test_database.py +++ b/t/unit/backends/test_database.py @@ -10,10 +10,10 @@ pytest.importorskip('sqlalchemy') -from celery.backends.database import DatabaseBackend, retry, session, session_cleanup # noqa -from celery.backends.database.models import Task, TaskSet # noqa -from celery.backends.database.session import PREPARE_MODELS_MAX_RETRIES, ResultModelBase, SessionManager # noqa -from t import skip # noqa +from celery.backends.database import DatabaseBackend, retry, session, session_cleanup +from celery.backends.database.models import Task, TaskSet +from celery.backends.database.session import PREPARE_MODELS_MAX_RETRIES, ResultModelBase, SessionManager +from t import skip class SomeClass: diff --git a/t/unit/bin/proj/app2.py b/t/unit/bin/proj/app2.py index 1eedbda5718..3eb4a20a0eb 100644 --- a/t/unit/bin/proj/app2.py +++ b/t/unit/bin/proj/app2.py @@ -1 +1 @@ -import celery # noqa: F401 +import celery diff --git a/t/unit/concurrency/test_eventlet.py b/t/unit/concurrency/test_eventlet.py index a044d4ae67a..30b57dae0b1 100644 --- a/t/unit/concurrency/test_eventlet.py +++ b/t/unit/concurrency/test_eventlet.py @@ -5,10 +5,10 @@ 
pytest.importorskip('eventlet') -from greenlet import GreenletExit # noqa +from greenlet import GreenletExit -import t.skip # noqa -from celery.concurrency.eventlet import TaskPool, Timer, apply_target # noqa +import t.skip +from celery.concurrency.eventlet import TaskPool, Timer, apply_target eventlet_modules = ( 'eventlet', diff --git a/t/unit/contrib/proj/foo.py b/t/unit/contrib/proj/foo.py index b6e3d656110..07a628b781c 100644 --- a/t/unit/contrib/proj/foo.py +++ b/t/unit/contrib/proj/foo.py @@ -1,4 +1,4 @@ -from xyzzy import plugh # noqa +from xyzzy import plugh from celery import Celery, shared_task diff --git a/t/unit/contrib/test_sphinx.py b/t/unit/contrib/test_sphinx.py index a4d74e04465..0b2bad28509 100644 --- a/t/unit/contrib/test_sphinx.py +++ b/t/unit/contrib/test_sphinx.py @@ -3,7 +3,7 @@ import pytest try: - from sphinx.application import Sphinx # noqa: F401 + from sphinx.application import Sphinx from sphinx_testing import TestApp sphinx_installed = True except ImportError: diff --git a/t/unit/contrib/test_worker.py b/t/unit/contrib/test_worker.py index 178a974998e..17cf005f175 100644 --- a/t/unit/contrib/test_worker.py +++ b/t/unit/contrib/test_worker.py @@ -2,7 +2,7 @@ # this import adds a @shared_task, which uses connect_on_app_finalize # to install the celery.ping task that the test lib uses -import celery.contrib.testing.tasks # noqa: F401 +import celery.contrib.testing.tasks from celery import Celery from celery.contrib.testing.worker import start_worker diff --git a/t/unit/tasks/test_canvas.py b/t/unit/tasks/test_canvas.py index 63966b2dadf..1c23b4fa693 100644 --- a/t/unit/tasks/test_canvas.py +++ b/t/unit/tasks/test_canvas.py @@ -3,7 +3,7 @@ from unittest.mock import ANY, MagicMock, Mock, call, patch, sentinel import pytest -import pytest_subtests # noqa: F401 +import pytest_subtests from celery import Task from celery._state import _task_stack diff --git a/t/unit/utils/test_functional.py b/t/unit/utils/test_functional.py index 57055a14a6e..9b9ec087e06 100644 --- a/t/unit/utils/test_functional.py +++ b/t/unit/utils/test_functional.py @@ -1,7 +1,7 @@ import collections import pytest -import pytest_subtests # noqa: F401 +import pytest_subtests from kombu.utils.functional import lazy from celery.utils.functional import (DummyContext, first, firstmethod, fun_accepts_kwargs, fun_takes_argument, From 736c8a8803a8348b27b9976f830c59a6c9db79ca Mon Sep 17 00:00:00 2001 From: "pyup.io bot" Date: Tue, 13 Dec 2022 07:15:30 -0500 Subject: [PATCH 0268/1051] Scheduled weekly dependency update for week 50 (#7954) * Update sphinx-click from 4.3.0 to 4.4.0 * Update mypy from 0.982 to 0.991 * Update cryptography from 38.0.3 to 38.0.4 * Pin elasticsearch to latest version 8.5.3 * Update pycurl from 7.43.0.5 to 7.45.1 * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * Update requirements/test-ci-default.txt * Update requirements/extras/elasticsearch.txt Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Asif Saif Uddin --- requirements/docs.txt | 2 +- requirements/extras/auth.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/docs.txt b/requirements/docs.txt index cdb836b29cd..d4704e0364e 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -1,7 +1,7 @@ sphinx_celery~=2.0.0 Sphinx>=3.0.0 sphinx-testing~=1.0.1 -sphinx-click==4.3.0 +sphinx-click==4.4.0 -r extras/sqlalchemy.txt -r test.txt -r deps/mock.txt 
diff --git a/requirements/extras/auth.txt b/requirements/extras/auth.txt
index 388c40441b4..2a81f1cb11e 100644
--- a/requirements/extras/auth.txt
+++ b/requirements/extras/auth.txt
@@ -1 +1 @@
-cryptography==38.0.3
+cryptography==38.0.4
diff --git a/requirements/test.txt b/requirements/test.txt
index 9e6362c6ab1..cb4b7bf0d4c 100644
--- a/requirements/test.txt
+++ b/requirements/test.txt
@@ -7,7 +7,7 @@ pytest-order==1.0.1
 boto3>=1.9.178
 moto>=2.2.6
 # typing extensions
-mypy==0.982; platform_python_implementation=="CPython"
+mypy==0.991; platform_python_implementation=="CPython"
 pre-commit==2.20.0
 -r extras/yaml.txt
 -r extras/msgpack.txt

From 8bba7f90dafff54a7bbe65ca54a6a78b8b82328c Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Wed, 14 Dec 2022 14:13:22 +0600
Subject: [PATCH 0269/1051] try pypy 3.9 in CI (#7956)

* try Python 3.11 and pypy 3.9 in CI

* Update python-package.yml

* Update .github/workflows/python-package.yml

---
 .github/workflows/python-package.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml
index 52c1438a9c3..7a30911874f 100644
--- a/.github/workflows/python-package.yml
+++ b/.github/workflows/python-package.yml
@@ -29,10 +29,10 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: ['3.7', '3.8', '3.9', '3.10', 'pypy-3.7', 'pypy-3.8']
+        python-version: ['3.7', '3.8', '3.9', '3.10', 'pypy-3.9', 'pypy-3.8']
         os: ["ubuntu-latest", "windows-latest"]
         exclude:
-          - python-version: 'pypy-3.7'
+          - python-version: 'pypy-3.9'
            os: "windows-latest"
           - python-version: 'pypy-3.8'
            os: "windows-latest"

From c2315e50fb53d6722c35cbba01d276d7f9d58fd1 Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Wed, 14 Dec 2022 14:15:13 +0600
Subject: [PATCH 0270/1051] sqlalchemy==1.4.45 (#7943)

* sqlalchemy==1.4.44

* sqlalchemy==2.0.0b4

* sqlalchemy==1.4.45

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
---
 requirements/extras/sqlalchemy.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements/extras/sqlalchemy.txt b/requirements/extras/sqlalchemy.txt
index 0f2e8f033eb..8e2b106495c 100644
--- a/requirements/extras/sqlalchemy.txt
+++ b/requirements/extras/sqlalchemy.txt
@@ -1 +1 @@
-sqlalchemy~=1.4.34
+sqlalchemy==1.4.45

From a8c2a1e1034bff711a60f57332b5b88ed207e8fd Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Wed, 14 Dec 2022 16:52:58 +0600
Subject: [PATCH 0271/1051] billiard>=4.1.0,<5.0 (#7957)

---
 requirements/default.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements/default.txt b/requirements/default.txt
index 34f4c77b685..f159c7bce7f 100644
--- a/requirements/default.txt
+++ b/requirements/default.txt
@@ -1,5 +1,5 @@
 pytz>=2021.3
-billiard>=4.0.2,<5.0
+billiard>=4.1.0,<5.0
 kombu>=5.3.0b2,<6.0
 vine>=5.0.0,<6.0
 click>=8.1.2,<9.0

From dd811b37717635b5f7151a7adf9f5bf12e1bc0c6 Mon Sep 17 00:00:00 2001
From: Max Nikitenko
Date: Sat, 10 Dec 2022 17:34:31 +0200
Subject: [PATCH 0272/1051] feat(typecheck): allow changing type check behavior on the app level;

For example, if you want to implement strict type checking that relies on
annotations or dataclass objects, you may just set `Celery.type_checker`
to a valid interface.
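To make the intent concrete, a minimal sketch of such an override (not part of this patch: ``StrictApp`` and its validation body are invented for illustration; the only contract shown in the diff below is that ``type_checker(fun, bound)`` returns the object stored as the task's ``__header__``, which Celery calls to check call arguments when task typing is enabled):

.. code-block:: python

    from celery import Celery

    class StrictApp(Celery):
        def type_checker(self, fun, bound=False):
            # Return the callable stored as the task's ``__header__``.
            # This sketch only validates keyword arguments against the
            # wrapped function's class annotations; positional arguments
            # would additionally need inspect.signature binding.
            def check_arguments(*args, **kwargs):
                for name, value in kwargs.items():
                    expected = fun.__annotations__.get(name)
                    if isinstance(expected, type) and not isinstance(value, expected):
                        raise TypeError(f'{name!r} must be {expected.__name__}')
            return staticmethod(check_arguments)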
---
 celery/app/base.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/celery/app/base.py b/celery/app/base.py
index d691dcbee61..73ddf4e0f7d 100644
--- a/celery/app/base.py
+++ b/celery/app/base.py
@@ -457,6 +457,9 @@ def cons(app):
                              sum([len(args), len(opts)])))
         return inner_create_task_cls(**opts)

+    def type_checker(self, fun, bound=False):
+        return staticmethod(head_from_fun(fun, bound=bound))
+
     def _task_from_fun(self, fun, name=None, base=None, bind=False, **options):
         if not self.finalized and not self.autofinalize:
             raise RuntimeError('Contract breach: app not finalized')
@@ -473,7 +476,7 @@ def _task_from_fun(self, fun, name=None, base=None, bind=False, **options):
             '__doc__': fun.__doc__,
             '__module__': fun.__module__,
             '__annotations__': fun.__annotations__,
-            '__header__': staticmethod(head_from_fun(fun, bound=bind)),
+            '__header__': self.type_checker(fun, bound=bind),
             '__wrapped__': run}, **options))()
         # for some reason __qualname__ cannot be set in type()
         # so we have to set it here.

From 0d5abd754ffbb8eda3bd591bb1cb2d4d920981cd Mon Sep 17 00:00:00 2001
From: Hiroko Tamagawa
Date: Thu, 15 Dec 2022 13:53:55 +0900
Subject: [PATCH 0273/1051] Add broker_channel_error_retry option (#7951)

* feat: add broker_channel_error_retry option

* docs: add configuration doc

* test: add unit test for broker_channel_error_retry

* remove empty 76

* docs: add version annotation

Co-authored-by: Asif Saif Uddin
---
 celery/app/defaults.py             |  1 +
 celery/worker/consumer/consumer.py |  6 +++++-
 docs/userguide/configuration.rst   | 13 +++++++++++++
 t/unit/worker/test_consumer.py     | 26 ++++++++++++++++++++++++++
 4 files changed, 45 insertions(+), 1 deletion(-)

diff --git a/celery/app/defaults.py b/celery/app/defaults.py
index ce8d0ae1a90..22d37481bb8 100644
--- a/celery/app/defaults.py
+++ b/celery/app/defaults.py
@@ -89,6 +89,7 @@ def __repr__(self):
         connection_retry=Option(True, type='bool'),
         connection_retry_on_startup=Option(None, type='bool'),
         connection_max_retries=Option(100, type='int'),
+        channel_error_retry=Option(False, type='bool'),
         failover_strategy=Option(None, type='string'),
         heartbeat=Option(120, type='int'),
         heartbeat_checkrate=Option(3.0, type='int'),
diff --git a/celery/worker/consumer/consumer.py b/celery/worker/consumer/consumer.py
index 5dec908da67..d70dc179c78 100644
--- a/celery/worker/consumer/consumer.py
+++ b/celery/worker/consumer/consumer.py
@@ -328,9 +328,13 @@ def start(self):
                     crit('Frequent restarts detected: %r', exc, exc_info=1)
                     sleep(1)
                 self.restart_count += 1
+                if self.app.conf.broker_channel_error_retry:
+                    recoverable_errors = (self.connection_errors + self.channel_errors)
+                else:
+                    recoverable_errors = self.connection_errors
                 try:
                     blueprint.start(self)
-                except self.connection_errors as exc:
+                except recoverable_errors as exc:
                     # If we're not retrying connections, we need to properly shutdown or terminate
                     # the Celery main process instead of abruptly aborting the process without any cleanup.
                     is_connection_loss_on_startup = self.restart_count == 0
diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst
index 5350d9fa2af..4372acb2102 100644
--- a/docs/userguide/configuration.rst
+++ b/docs/userguide/configuration.rst
@@ -2806,6 +2806,19 @@ to the AMQP broker.

 If this is set to :const:`0` or :const:`None`, we'll retry forever.

+``broker_channel_error_retry``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

.. versionadded:: 5.3

Default: Disabled.
+
+Automatically try to re-establish the connection to the AMQP broker
+if a channel error occurs.
+
+The retry count and interval are the same as those of `broker_connection_retry`.
+Also, this option has no effect when `broker_connection_retry` is `False`.
+
 .. setting:: broker_login_method

 ``broker_login_method``
diff --git a/t/unit/worker/test_consumer.py b/t/unit/worker/test_consumer.py
index f0acc0e8b99..707f6db4302 100644
--- a/t/unit/worker/test_consumer.py
+++ b/t/unit/worker/test_consumer.py
@@ -4,6 +4,7 @@
 from unittest.mock import MagicMock, Mock, call, patch

 import pytest
+from amqp import ChannelError
 from billiard.exceptions import RestartFreqExceeded

 from celery import bootsteps
@@ -310,6 +311,31 @@ def test_blueprint_restart_when_state_not_in_stop_conditions(self, broker_connec
             c.start()
             c.blueprint.restart.assert_called_once()

+    @pytest.mark.parametrize("broker_channel_error_retry", [True, False])
+    def test_blueprint_restart_for_channel_errors(self, broker_channel_error_retry):
+        c = self.get_consumer()
+
+        # ensure that WorkerShutdown is not raised
+        c.app.conf['broker_connection_retry'] = True
+        c.app.conf['broker_connection_retry_on_startup'] = True
+        c.app.conf['broker_channel_error_retry'] = broker_channel_error_retry
+        c.restart_count = -1
+
+        # ensure that blueprint state is not in stop conditions
+        c.blueprint.state = bootsteps.RUN
+        c.blueprint.start.side_effect = ChannelError()
+
+        # stops test from running indefinitely in the while loop
+        c.blueprint.restart.side_effect = self._closer(c)
+
+        # restarted only when broker_channel_error_retry is True
+        if broker_channel_error_retry:
+            c.start()
+            c.blueprint.restart.assert_called_once()
+        else:
+            with pytest.raises(ChannelError):
+                c.start()
+
     def test_collects_at_restart(self):
         c = self.get_consumer()
         c.connection.collect.side_effect = MemoryError()

From 8a92e0ff3f5c09fdd9cf055927f6cddf511dfa12 Mon Sep 17 00:00:00 2001
From: Austin Snoeyink
Date: Thu, 15 Dec 2022 02:51:51 -0500
Subject: [PATCH 0274/1051] Add beat_cron_starting_deadline_seconds to prevent unwanted cron runs (#7945)

* add beat_cron_starting_deadline to prevent unwanted cron tasks from running

* update cron_starting_deadline docs with versionadded

---
 CONTRIBUTORS.txt                 |   1 +
 celery/app/defaults.py           |   1 +
 celery/schedules.py              |  37 ++++++++-
 docs/userguide/configuration.rst |  13 +++
 t/unit/app/test_beat.py          |   3 +
 t/unit/app/test_schedules.py     | 133 +++++++++++++++++++++++++++++++
 6 files changed, 185 insertions(+), 3 deletions(-)

diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt
index fe420b14d67..e8c1dec868b 100644
--- a/CONTRIBUTORS.txt
+++ b/CONTRIBUTORS.txt
@@ -291,3 +291,4 @@ Tizian Seehaus, 2022/02/09
 Oleh Romanovskyi, 2022/06/09
 JoonHwan Kim, 2022/08/01
 Kaustav Banerjee, 2022/11/10
+Austin Snoeyink, 2022/12/06
diff --git a/celery/app/defaults.py b/celery/app/defaults.py
index 22d37481bb8..a9f68689940 100644
--- a/celery/app/defaults.py
+++ b/celery/app/defaults.py
@@ -78,6 +78,7 @@ def __repr__(self):
         scheduler=Option('celery.beat:PersistentScheduler'),
         schedule_filename=Option('celerybeat-schedule'),
         sync_every=Option(0, type='int'),
+        cron_starting_deadline=Option(None, type=int)
     ),
     broker=Namespace(
         url=Option(None, type='string'),
diff --git a/celery/schedules.py b/celery/schedules.py
index 62940132098..9798579754f 100644
--- a/celery/schedules.py
+++ b/celery/schedules.py
@@ -36,7 +36,6 @@
 {0._orig_day_of_week} (m/h/dM/MY/d)>\
 """

-
 SOLAR_INVALID_LATITUDE = """\
 Argument latitude {lat} is invalid, must be between -90 and 90.\
 """
@@ 
-608,16 +607,48 @@ def remaining_estimate(self, last_run_at, ffwd=ffwd): def is_due(self, last_run_at): """Return tuple of ``(is_due, next_time_to_run)``. + If :setting:`beat_cron_starting_deadline` has been specified, the + scheduler will make sure that the `last_run_at` time is within the + deadline. This prevents tasks that could have been run according to + the crontab, but didn't, from running again unexpectedly. + Note: Next time to run is in seconds. SeeAlso: :meth:`celery.schedules.schedule.is_due` for more information. """ + rem_delta = self.remaining_estimate(last_run_at) - rem = max(rem_delta.total_seconds(), 0) + rem_secs = rem_delta.total_seconds() + rem = max(rem_secs, 0) due = rem == 0 - if due: + + deadline_secs = self.app.conf.beat_cron_starting_deadline + has_passed_deadline = False + if deadline_secs is not None: + # Make sure we're looking at the latest possible feasible run + # date when checking the deadline. + last_date_checked = last_run_at + last_feasible_rem_secs = rem_secs + while rem_secs < 0: + last_date_checked = last_date_checked + abs(rem_delta) + rem_delta = self.remaining_estimate(last_date_checked) + rem_secs = rem_delta.total_seconds() + if rem_secs < 0: + last_feasible_rem_secs = rem_secs + + # if rem_secs becomes 0 or positive, second-to-last + # last_date_checked must be the last feasible run date. + # Check if the last feasible date is within the deadline + # for running + has_passed_deadline = -last_feasible_rem_secs > deadline_secs + if has_passed_deadline: + # Should not be due if we've passed the deadline for looking + # at past runs + due = False + + if due or has_passed_deadline: rem_delta = self.remaining_estimate(self.now()) rem = max(rem_delta.total_seconds(), 0) return schedstate(due, rem) diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst index 4372acb2102..fbc22200cbd 100644 --- a/docs/userguide/configuration.rst +++ b/docs/userguide/configuration.rst @@ -3508,3 +3508,16 @@ changes to the schedule into account. Also when running Celery beat embedded (:option:`-B `) on Jython as a thread the max interval is overridden and set to 1 so that it's possible to shut down in a timely manner. + +.. setting:: beat_cron_starting_deadline + +``beat_cron_starting_deadline`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +.. versionadded:: 5.3 + +Default: None. + +When using cron, the number of seconds :mod:`~celery.bin.beat` can look back +when deciding whether a cron schedule is due. When set to `None`, cronjobs that +are past due will always run immediately. 
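A hedged configuration sketch for the new setting (the schedule entry and task name below are made up; only ``beat_cron_starting_deadline`` and its semantics come from this patch, and ``app`` is assumed to be an existing :class:`~celery.Celery` instance):

.. code-block:: python

    from celery.schedules import crontab

    # Fire a missed cron run only if its last feasible run time was at
    # most one hour in the past; older misses are skipped instead of
    # running immediately when Beat catches up.
    app.conf.beat_cron_starting_deadline = 3600  # seconds

    app.conf.beat_schedule = {
        'nightly-report': {                   # hypothetical entry name
            'task': 'tasks.generate_report',  # hypothetical task
            'schedule': crontab(hour=7, minute=30),
        },
    }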
diff --git a/t/unit/app/test_beat.py b/t/unit/app/test_beat.py index 94fdb0b464f..84f36d04f86 100644 --- a/t/unit/app/test_beat.py +++ b/t/unit/app/test_beat.py @@ -696,16 +696,19 @@ def now_func(): 'first_missed', 'first_missed', last_run_at=now_func() - timedelta(minutes=2), total_run_count=10, + app=self.app, schedule=app_schedule['first_missed']['schedule']), 'second_missed': beat.ScheduleEntry( 'second_missed', 'second_missed', last_run_at=now_func() - timedelta(minutes=2), total_run_count=10, + app=self.app, schedule=app_schedule['second_missed']['schedule']), 'non_missed': beat.ScheduleEntry( 'non_missed', 'non_missed', last_run_at=now_func() - timedelta(minutes=2), total_run_count=10, + app=self.app, schedule=app_schedule['non_missed']['schedule']), } diff --git a/t/unit/app/test_schedules.py b/t/unit/app/test_schedules.py index ec3baedce85..d6f555c2cf2 100644 --- a/t/unit/app/test_schedules.py +++ b/t/unit/app/test_schedules.py @@ -800,3 +800,136 @@ def test_yearly_execution_is_not_due(self): due, remaining = self.yearly.is_due(datetime(2009, 3, 12, 7, 30)) assert not due assert remaining == 4 * 24 * 60 * 60 - 3 * 60 * 60 + + def test_execution_not_due_if_task_not_run_at_last_feasible_time_outside_deadline( + self): + """If the crontab schedule was added after the task was due, don't + immediately fire the task again""" + # could have feasibly been run on 12/5 at 7:30, but wasn't. + self.app.conf.beat_cron_starting_deadline = 3600 + last_run = datetime(2022, 12, 4, 10, 30) + now = datetime(2022, 12, 5, 10, 30) + expected_next_execution_time = datetime(2022, 12, 6, 7, 30) + expected_remaining = ( + expected_next_execution_time - now).total_seconds() + + # Run the daily (7:30) crontab with the current date + with patch_crontab_nowfun(self.daily, now): + due, remaining = self.daily.is_due(last_run) + assert remaining == expected_remaining + assert not due + + def test_execution_not_due_if_task_not_run_at_last_feasible_time_no_deadline_set( + self): + """Same as above test except there's no deadline set, so it should be + due""" + last_run = datetime(2022, 12, 4, 10, 30) + now = datetime(2022, 12, 5, 10, 30) + expected_next_execution_time = datetime(2022, 12, 6, 7, 30) + expected_remaining = ( + expected_next_execution_time - now).total_seconds() + + # Run the daily (7:30) crontab with the current date + with patch_crontab_nowfun(self.daily, now): + due, remaining = self.daily.is_due(last_run) + assert remaining == expected_remaining + assert due + + def test_execution_due_if_task_not_run_at_last_feasible_time_within_deadline( + self): + # Could have feasibly been run on 12/5 at 7:30, but wasn't. We are + # still within a 1 hour deadline from the + # last feasible run, so the task should still be due. + self.app.conf.beat_cron_starting_deadline = 3600 + last_run = datetime(2022, 12, 4, 10, 30) + now = datetime(2022, 12, 5, 8, 0) + expected_next_execution_time = datetime(2022, 12, 6, 7, 30) + expected_remaining = ( + expected_next_execution_time - now).total_seconds() + + # run the daily (7:30) crontab with the current date + with patch_crontab_nowfun(self.daily, now): + due, remaining = self.daily.is_due(last_run) + assert remaining == expected_remaining + assert due + + def test_execution_due_if_task_not_run_at_any_feasible_time_within_deadline( + self): + # Could have feasibly been run on 12/4 at 7:30, or 12/5 at 7:30, + # but wasn't. We are still within a 1 hour + # deadline from the last feasible run (12/5), so the task should + # still be due. 
+ self.app.conf.beat_cron_starting_deadline = 3600 + last_run = datetime(2022, 12, 3, 10, 30) + now = datetime(2022, 12, 5, 8, 0) + expected_next_execution_time = datetime(2022, 12, 6, 7, 30) + expected_remaining = ( + expected_next_execution_time - now).total_seconds() + + # Run the daily (7:30) crontab with the current date + with patch_crontab_nowfun(self.daily, now): + due, remaining = self.daily.is_due(last_run) + assert remaining == expected_remaining + assert due + + def test_execution_not_due_if_task_not_run_at_any_feasible_time_outside_deadline( + self): + """Verifies that remaining is still the time to the next + feasible run date even though the original feasible date + was passed over in favor of a newer one.""" + # Could have feasibly been run on 12/4 or 12/5 at 7:30, + # but wasn't. + self.app.conf.beat_cron_starting_deadline = 3600 + last_run = datetime(2022, 12, 3, 10, 30) + now = datetime(2022, 12, 5, 11, 0) + expected_next_execution_time = datetime(2022, 12, 6, 7, 30) + expected_remaining = ( + expected_next_execution_time - now).total_seconds() + + # run the daily (7:30) crontab with the current date + with patch_crontab_nowfun(self.daily, now): + due, remaining = self.daily.is_due(last_run) + assert remaining == expected_remaining + assert not due + + def test_execution_not_due_if_last_run_in_future(self): + # Should not run if the last_run hasn't happened yet. + last_run = datetime(2022, 12, 6, 7, 30) + now = datetime(2022, 12, 5, 10, 30) + expected_next_execution_time = datetime(2022, 12, 7, 7, 30) + expected_remaining = ( + expected_next_execution_time - now).total_seconds() + + # Run the daily (7:30) crontab with the current date + with patch_crontab_nowfun(self.daily, now): + due, remaining = self.daily.is_due(last_run) + assert not due + assert remaining == expected_remaining + + def test_execution_not_due_if_last_run_at_last_feasible_time(self): + # Last feasible time is 12/5 at 7:30 + last_run = datetime(2022, 12, 5, 7, 30) + now = datetime(2022, 12, 5, 10, 30) + expected_next_execution_time = datetime(2022, 12, 6, 7, 30) + expected_remaining = ( + expected_next_execution_time - now).total_seconds() + + # Run the daily (7:30) crontab with the current date + with patch_crontab_nowfun(self.daily, now): + due, remaining = self.daily.is_due(last_run) + assert remaining == expected_remaining + assert not due + + def test_execution_not_due_if_last_run_past_last_feasible_time(self): + # Last feasible time is 12/5 at 7:30 + last_run = datetime(2022, 12, 5, 8, 30) + now = datetime(2022, 12, 5, 10, 30) + expected_next_execution_time = datetime(2022, 12, 6, 7, 30) + expected_remaining = ( + expected_next_execution_time - now).total_seconds() + + # Run the daily (7:30) crontab with the current date + with patch_crontab_nowfun(self.daily, now): + due, remaining = self.daily.is_due(last_run) + assert remaining == expected_remaining + assert not due From 795a8e252140f2c62db047f2b993e0454e659bce Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 19 Dec 2022 17:08:41 +0000 Subject: [PATCH 0275/1051] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pycqa/isort: 5.10.1 → v5.11.3](https://github.com/pycqa/isort/compare/5.10.1...v5.11.3) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 65933ac32b1..16d19389cbc 100644 
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -24,7 +24,7 @@ repos:
   - id: mixed-line-ending

   - repo: https://github.com/pycqa/isort
-    rev: 5.10.1
+    rev: v5.11.3
     hooks:
       - id: isort

From f3a2cf45a69b443cac6c79a5c85583c8bd91b0a3 Mon Sep 17 00:00:00 2001
From: "pyup.io bot"
Date: Mon, 19 Dec 2022 13:08:09 -0500
Subject: [PATCH 0276/1051] Scheduled weekly dependency update for week 51 (#7965)

* Pin isort to latest version 5.11.3

* Pin elasticsearch to latest version 8.5.3

* Update pycurl from 7.43.0.5 to 7.45.2

* Update requirements/extras/elasticsearch.txt

* Update requirements/test-ci-default.txt

Co-authored-by: Asif Saif Uddin
---
 requirements/dev.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements/dev.txt b/requirements/dev.txt
index fbc54e32a4e..b6425608a53 100644
--- a/requirements/dev.txt
+++ b/requirements/dev.txt
@@ -3,4 +3,4 @@ git+https://github.com/celery/py-amqp.git
 git+https://github.com/celery/kombu.git
 git+https://github.com/celery/billiard.git
 vine>=5.0.0
-isort~=5.10.1
+isort==5.11.3

From bc1d3268582b6d7d9975c1b2f5acc813fa4fd710 Mon Sep 17 00:00:00 2001
From: Tomer Nosrati
Date: Tue, 20 Dec 2022 18:39:50 +0200
Subject: [PATCH 0277/1051] Added doc to "retry_errors" newly supported field of "publish_retry_policy" of the task namespace (#7967)

---
 docs/userguide/calling.rst | 26 ++++++++++++++++++++++
 1 file changed, 26 insertions(+)

diff --git a/docs/userguide/calling.rst b/docs/userguide/calling.rst
index 038a43dce18..10fd1e4414d 100644
--- a/docs/userguide/calling.rst
+++ b/docs/userguide/calling.rst
@@ -356,6 +356,31 @@ and can contain the following keys:
     Maximum number of seconds (float or integer) to wait between retries.
     Default is 0.2.

+- `retry_errors`
+
+    `retry_errors` is a tuple of exception classes for which publishing
+    should be retried. It will be ignored if not specified. Default is None (ignored).
+
+    .. warning::
+
+        If you specify a tuple of exception classes, you must make sure
+        that you also specify the ``max_retries`` option, otherwise
+        you will get an error.
+
+    For example, if you want to retry publishing only when it times out, you can use
+    :exc:`~kombu.exceptions.TimeoutError`:
+
+    .. code-block:: python
+
+        from kombu.exceptions import TimeoutError
+
+        add.apply_async((2, 2), retry=True, retry_policy={
+            'max_retries': 3,
+            'retry_errors': (TimeoutError, ),
+        })
+
+    .. versionadded:: 5.3
+
 For example, the default policy correlates to:

 .. code-block:: python
@@ -365,6 +390,7 @@ For example, the default policy correlates to:
         'interval_start': 0,
         'interval_step': 0.2,
         'interval_max': 0.2,
+        'retry_errors': None,
     })

 the maximum time spent retrying will be 0.4 seconds. 
It's set relatively From f67931b998113c5cce334d771722d51d6a49d6e5 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 20 Dec 2022 19:16:36 +0200 Subject: [PATCH 0278/1051] Renamed from master to main in the docs and the CI workflows --- .github/ISSUE_TEMPLATE/Bug-Report.md | 26 +++++++++---------- .../Documentation-Bug-Report.md | 4 +-- .github/ISSUE_TEMPLATE/Enhancement.md | 4 +-- .github/ISSUE_TEMPLATE/Feature-Request.md | 4 +-- .../Major-Version-Release-Checklist.md | 2 +- .../Minor-Version-Release-Checklist.md | 2 +- .github/PULL_REQUEST_TEMPLATE.md | 6 ++--- .github/workflows/changerelease.yml | 2 +- .github/workflows/codeql-analysis.yml | 4 +-- .github/workflows/python-package.yml | 4 +-- CONTRIBUTING.rst | 18 ++++++------- Changelog.rst | 2 +- README.rst | 16 ++++++------ docs/django/first-steps-with-django.rst | 2 +- docs/includes/installation.txt | 10 +++---- docs/includes/resources.txt | 2 +- docs/templates/readme.txt | 6 ++--- docs/userguide/concurrency/eventlet.rst | 2 +- docs/userguide/daemonizing.rst | 8 +++--- 19 files changed, 62 insertions(+), 62 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/Bug-Report.md b/.github/ISSUE_TEMPLATE/Bug-Report.md index bdf95bffb5e..b38217f9add 100644 --- a/.github/ISSUE_TEMPLATE/Bug-Report.md +++ b/.github/ISSUE_TEMPLATE/Bug-Report.md @@ -16,17 +16,17 @@ bug reports which are incomplete. -- [ ] I have verified that the issue exists against the `master` branch of Celery. +- [ ] I have verified that the issue exists against the `main` branch of Celery. - [ ] This has already been asked to the [discussions forum](https://github.com/celery/celery/discussions) first. - [ ] I have read the relevant section in the - [contribution guide](https://docs.celeryq.dev/en/master/contributing.html#other-bugs) + [contribution guide](https://docs.celeryq.dev/en/main/contributing.html#other-bugs) on reporting bugs. - [ ] I have checked the [issues list](https://github.com/celery/celery/issues?q=is%3Aissue+label%3A%22Issue+Type%3A+Bug+Report%22+-label%3A%22Category%3A+Documentation%22) for similar or identical bug reports. - [ ] I have checked the [pull requests list](https://github.com/celery/celery/pulls?q=is%3Apr+label%3A%22PR+Type%3A+Bugfix%22+-label%3A%22Category%3A+Documentation%22) for existing proposed fixes. -- [ ] I have checked the [commit log](https://github.com/celery/celery/commits/master) - to find out if the bug was already fixed in the master branch. +- [ ] I have checked the [commit log](https://github.com/celery/celery/commits/main) + to find out if the bug was already fixed in the main branch. - [ ] I have included all related issues and possible duplicate issues in this issue (If there are none, check this box anyway). @@ -35,7 +35,7 @@ To check an item on the list replace [ ] with [x]. - [ ] I have included the output of ``celery -A proj report`` in the issue. (if you are not able to do this, then at least specify the Celery version affected). -- [ ] I have verified that the issue exists against the `master` branch of Celery. +- [ ] I have verified that the issue exists against the `main` branch of Celery. - [ ] I have included the contents of ``pip freeze`` in the issue. - [ ] I have included all the versions of all the external dependencies required to reproduce this bug. 
@@ -96,14 +96,14 @@ on the subject: https://help.github.com/en/articles/autolinked-references-and-ur

 ## Required Dependencies

-* **Minimal Python Version**: N/A or Unknown
-* **Minimal Celery Version**: N/A or Unknown
-* **Minimal Kombu Version**: N/A or Unknown
-* **Minimal Broker Version**: N/A or Unknown
-* **Minimal Result Backend Version**: N/A or Unknown
-* **Minimal OS and/or Kernel Version**: N/A or Unknown
-* **Minimal Broker Client Version**: N/A or Unknown
-* **Minimal Result Backend Client Version**: N/A or Unknown
+- **Minimal Python Version**: N/A or Unknown
+- **Minimal Celery Version**: N/A or Unknown
+- **Minimal Kombu Version**: N/A or Unknown
+- **Minimal Broker Version**: N/A or Unknown
+- **Minimal Result Backend Version**: N/A or Unknown
+- **Minimal OS and/or Kernel Version**: N/A or Unknown
+- **Minimal Broker Client Version**: N/A or Unknown
+- **Minimal Result Backend Client Version**: N/A or Unknown

 ### Python Packages

diff --git a/.github/ISSUE_TEMPLATE/Documentation-Bug-Report.md b/.github/ISSUE_TEMPLATE/Documentation-Bug-Report.md
index af5c97152a5..97f341dbc40 100644
--- a/.github/ISSUE_TEMPLATE/Documentation-Bug-Report.md
+++ b/.github/ISSUE_TEMPLATE/Documentation-Bug-Report.md
@@ -21,8 +21,8 @@ To check an item on the list replace [ ] with [x].
   for similar or identical bug reports.
 - [ ] I have checked the [pull requests list](https://github.com/celery/celery/pulls?q=is%3Apr+label%3A%22Category%3A+Documentation%22)
   for existing proposed fixes.
-- [ ] I have checked the [commit log](https://github.com/celery/celery/commits/master)
-  to find out if the bug was already fixed in the master branch.
+- [ ] I have checked the [commit log](https://github.com/celery/celery/commits/main)
+  to find out if the bug was already fixed in the main branch.
 - [ ] I have included all related issues and possible duplicate issues
   in this issue (If there are none, check this box anyway).

diff --git a/.github/ISSUE_TEMPLATE/Enhancement.md b/.github/ISSUE_TEMPLATE/Enhancement.md
index 7bcffae9458..3174256ac14 100644
--- a/.github/ISSUE_TEMPLATE/Enhancement.md
+++ b/.github/ISSUE_TEMPLATE/Enhancement.md
@@ -21,9 +21,9 @@ To check an item on the list replace [ ] with [x].
   for similar or identical enhancement to an existing feature.
 - [ ] I have checked the [pull requests list](https://github.com/celery/celery/pulls?q=is%3Apr+label%3A%22Issue+Type%3A+Enhancement%22+-label%3A%22Category%3A+Documentation%22)
   for existing proposed enhancements.
-- [ ] I have checked the [commit log](https://github.com/celery/celery/commits/master)
+- [ ] I have checked the [commit log](https://github.com/celery/celery/commits/main)
   to find out if the same enhancement was already implemented in the
-  master branch.
+  main branch.
 - [ ] I have included all related issues and possible duplicate issues
   in this issue (If there are none, check this box anyway).

diff --git a/.github/ISSUE_TEMPLATE/Feature-Request.md b/.github/ISSUE_TEMPLATE/Feature-Request.md
index 18fe7044f7a..5de9452a55c 100644
--- a/.github/ISSUE_TEMPLATE/Feature-Request.md
+++ b/.github/ISSUE_TEMPLATE/Feature-Request.md
@@ -21,9 +21,9 @@ To check an item on the list replace [ ] with [x].
   for similar or identical feature requests.
 - [ ] I have checked the [pull requests list](https://github.com/celery/celery/pulls?utf8=%E2%9C%93&q=is%3Apr+label%3A%22PR+Type%3A+Feature%22+)
   for existing proposed implementations of this feature. 
-- [ ] I have checked the [commit log](https://github.com/celery/celery/commits/master) +- [ ] I have checked the [commit log](https://github.com/celery/celery/commits/main) to find out if the same feature was already implemented in the - master branch. + main branch. - [ ] I have included all related issues and possible duplicate issues in this issue (If there are none, check this box anyway). diff --git a/.github/ISSUE_TEMPLATE/Major-Version-Release-Checklist.md b/.github/ISSUE_TEMPLATE/Major-Version-Release-Checklist.md index c805cc5ae16..fcc81ec0aa9 100644 --- a/.github/ISSUE_TEMPLATE/Major-Version-Release-Checklist.md +++ b/.github/ISSUE_TEMPLATE/Major-Version-Release-Checklist.md @@ -20,7 +20,7 @@ Release PR: - [ ] Milestone is 100% done - [ ] Merge Freeze - [ ] Release PR reviewed -- [ ] The master branch build passes +- [ ] The main branch build passes [![Build Status](https://github.com/celery/celery/actions/workflows/python-package.yml/badge.svg)](https://github.com/celery/celery/actions/workflows/python-package.yml) - [ ] Release Notes diff --git a/.github/ISSUE_TEMPLATE/Minor-Version-Release-Checklist.md b/.github/ISSUE_TEMPLATE/Minor-Version-Release-Checklist.md index f6717b485c7..a6343b27bbc 100644 --- a/.github/ISSUE_TEMPLATE/Minor-Version-Release-Checklist.md +++ b/.github/ISSUE_TEMPLATE/Minor-Version-Release-Checklist.md @@ -14,7 +14,7 @@ Release PR: - [ ] Release PR drafted - [ ] Release PR reviewed -- [ ] The master branch build passes +- [ ] The main branch build passes [![Build Status](https://github.com/celery/celery/actions/workflows/python-package.yml/badge.svg)](https://github.com/celery/celery/actions/workflows/python-package.yml) - [ ] Release Notes diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index b9e27ef0915..f9e0765d935 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -1,12 +1,12 @@ *Note*: Before submitting this pull request, please review our [contributing -guidelines](https://docs.celeryq.dev/en/master/contributing.html). +guidelines](https://docs.celeryq.dev/en/main/contributing.html). ## Description 1 + group_sig, # --> [1+3, 1+4] --> [4, 5] + chord_sig, # --> [4+5, 4+5] --> [9, 9] --> 9+9 --> 18 + sig_2 # --> 18 + 2 --> 20 + ) + callback = signature('callback_task') + errback = signature('errback_task') + chain_sig.stamp(visitor=CustomStampingVisitor()) + chain_sig.link(callback) + chain_sig.link_error(errback) + chain_sig_res = chain_sig.apply_async() + chain_sig_res.get() + + with subtests.test("Confirm the chain was executed correctly", result=20): + # Before we run our assersions, let's confirm the base functionality of the chain is working + # as expected including the links stamping. 
+ assert chain_sig_res.result == 20 + + with subtests.test("sig_1 is stamped with custom visitor", stamped_headers=["header", "groups"]): + assert sorted(sig_1_res._get_task_meta()["stamped_headers"]) == sorted(["header", "groups"]) + + with subtests.test("group_sig is stamped with custom visitor", stamped_headers=["header", "groups"]): + for result in group_sig_res.results: + assert sorted(result._get_task_meta()["stamped_headers"]) == sorted(["header", "groups"]) + + with subtests.test("chord_sig is stamped with custom visitor", stamped_headers=["header", "groups"]): + assert sorted(chord_sig_res._get_task_meta()["stamped_headers"]) == sorted(["header", "groups"]) + + with subtests.test("sig_2 is stamped with custom visitor", stamped_headers=["header", "groups"]): + assert sorted(sig_2_res._get_task_meta()["stamped_headers"]) == sorted(["header", "groups"]) + + with subtests.test("callback is stamped with custom visitor", + stamped_headers=["header", "groups, on_callback"]): + callback_link = chain_sig.options['link'][0] + headers = callback_link.options + stamped_headers = headers['stamped_headers'] + assert 'on_callback' not in stamped_headers, "Linking after stamping should not stamp the callback" + assert sorted(stamped_headers) == sorted(["header", "groups"]) + assert headers['header'] == 'value' + + with subtests.test("errback is stamped with custom visitor", + stamped_headers=["header", "groups, on_errback"]): + errback_link = chain_sig.options['link_error'][0] + headers = errback_link.options + stamped_headers = headers['stamped_headers'] + assert 'on_callback' not in stamped_headers, "Linking after stamping should not stamp the errback" + assert sorted(stamped_headers) == sorted(["header", "groups"]) + assert headers['header'] == 'value' + @pytest.mark.usefixtures('depends_on_current_app') def test_callback_stamping_on_replace(self, subtests): class CustomStampingVisitor(StampingVisitor): From e54780f8003a16295ff91009f6eb380d8c0e1a06 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Thu, 12 Jan 2023 03:18:39 +0600 Subject: [PATCH 0287/1051] sqlalchemy==1.4.46 (#7995) * sqlalchemy==1.4.46 * Update requirements/extras/sqlalchemy.txt --- requirements/extras/sqlalchemy.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/sqlalchemy.txt b/requirements/extras/sqlalchemy.txt index 8e2b106495c..41f620f3cc8 100644 --- a/requirements/extras/sqlalchemy.txt +++ b/requirements/extras/sqlalchemy.txt @@ -1 +1 @@ -sqlalchemy==1.4.45 +sqlalchemy>=1.4.46 From 5a3872433d444099c375c272f0f8db2ec0952a9f Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Sat, 21 Jan 2023 22:33:36 +0600 Subject: [PATCH 0288/1051] pytz (#8002) * pytz (>dev) * Update requirements/dev.txt * pytz --- requirements/dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/dev.txt b/requirements/dev.txt index 0a889789713..7936822a2de 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -1,4 +1,4 @@ -pytz>dev +pytz git+https://github.com/celery/py-amqp.git git+https://github.com/celery/kombu.git git+https://github.com/celery/billiard.git From fcd8fdd725f7766d7b9d28f4fc828b18b05fb19d Mon Sep 17 00:00:00 2001 From: Yaroslav Halchenko Date: Tue, 24 Jan 2023 07:14:28 -0500 Subject: [PATCH 0289/1051] Fix few typos, provide configuration + workflow for codespell to catch any new (#8023) * Rudimentary codespellrc configuration * [DATALAD RUNCMD] Make misspelling reserv to be called as a full task_reserved it patches === Do not change lines below === { 
"chain": [], "cmd": "sed -i -e 's,reserv\\>,task_reserved,g' t/unit/worker/test_consumer.py", "exit": 0, "extra_inputs": [], "inputs": [], "outputs": [], "pwd": "." } ^^^ Do not change lines above ^^^ * [DATALAD RUNCMD] Rename passt into pass_value to not confuse codespell === Do not change lines below === { "chain": [], "cmd": "sed -i -e 's,passt\\>,pass_value,g' ./t/unit/tasks/test_canvas.py ./t/unit/tasks/test_chord.py ./t/unit/tasks/test_context.py ./t/unit/tasks/test_result.py ./t/unit/tasks/test_states.py ./t/unit/tasks/test_tasks.py ./t/unit/tasks/test_trace.py", "exit": 0, "extra_inputs": [], "inputs": [], "outputs": [], "pwd": "." } ^^^ Do not change lines above ^^^ * strat -> strategy, padd -> pad, custom typo * [DATALAD RUNCMD] Run codespell -w === Do not change lines below === { "chain": [], "cmd": "codespell -w", "exit": 0, "extra_inputs": [], "inputs": [], "outputs": [], "pwd": "." } ^^^ Do not change lines above ^^^ --- .codespellrc | 4 ++++ celery/canvas.py | 2 +- celery/security/certificate.py | 4 ++-- celery/security/key.py | 4 ++-- docs/userguide/canvas.rst | 4 ++-- docs/userguide/workers.rst | 2 +- t/unit/tasks/test_result.py | 4 ++-- t/unit/tasks/test_trace.py | 8 ++++---- t/unit/worker/test_consumer.py | 16 ++++++++-------- t/unit/worker/test_worker.py | 4 ++-- 10 files changed, 28 insertions(+), 24 deletions(-) create mode 100644 .codespellrc diff --git a/.codespellrc b/.codespellrc new file mode 100644 index 00000000000..e35a7588699 --- /dev/null +++ b/.codespellrc @@ -0,0 +1,4 @@ +[codespell] +skip = .git,.venv,*.svg,package-lock.json,*.key +# Some names and timezone (lower cased) +ignore-words-list = gool,markey,sherif,wil,ist,fromm,brane,bu,nott diff --git a/celery/canvas.py b/celery/canvas.py index a211f21226f..8c09a9c5b90 100644 --- a/celery/canvas.py +++ b/celery/canvas.py @@ -1104,7 +1104,7 @@ def prepare_steps(self, args, kwargs, tasks, app (Celery): The Celery app instance. last_task_id (str): The id of the last task in the chain. group_id (str): The id of the group that the chain is a part of. - chord_body (Signature): The body of the chord, used to syncronize with the chain's + chord_body (Signature): The body of the chord, used to synchronize with the chain's last task and the chord's body when used together. clone (bool): Whether to clone the chain's tasks before modifying them. from_dict (Callable): A function that takes a dict and returns a Signature. 
diff --git a/celery/security/certificate.py b/celery/security/certificate.py
index d259734cb13..ebc8cd630d7 100644
--- a/celery/security/certificate.py
+++ b/celery/security/certificate.py
@@ -52,12 +52,12 @@ def verify(self, data, signature, digest):
         """Verify signature for string containing data."""
         with reraise_errors('Bad signature: {0!r}'):

-            padd = padding.PSS(
+            pad = padding.PSS(
                 mgf=padding.MGF1(digest),
                 salt_length=padding.PSS.MAX_LENGTH)

             self.get_pubkey().verify(signature,
-                                     ensure_bytes(data), padd, digest)
+                                     ensure_bytes(data), pad, digest)


 class CertStore:
diff --git a/celery/security/key.py b/celery/security/key.py
index d001059077f..ae932b2b762 100644
--- a/celery/security/key.py
+++ b/celery/security/key.py
@@ -28,8 +28,8 @@ def sign(self, data, digest):
         """Sign string containing data."""
         with reraise_errors('Unable to sign data: {0!r}'):

-            padd = padding.PSS(
+            pad = padding.PSS(
                 mgf=padding.MGF1(digest),
                 salt_length=padding.PSS.MAX_LENGTH)

-            return self._key.sign(ensure_bytes(data), padd, digest)
+            return self._key.sign(ensure_bytes(data), pad, digest)
diff --git a/docs/userguide/canvas.rst b/docs/userguide/canvas.rst
index 14f7d5f6e9d..c147777cb98 100644
--- a/docs/userguide/canvas.rst
+++ b/docs/userguide/canvas.rst
@@ -1278,7 +1278,7 @@ visitor will be applied to the callback as well.

     The callback must be linked to the signature before stamping.

-For example, lets examine the following custome stamping visitor.
+For example, let's examine the following custom stamping visitor.
@@ -1337,4 +1337,4 @@ This will result in the following stamps:

    >>> c.body.options['link'][0].options
    {'header': 'value', 'on_callback': True, 'groups': [], 'stamped_headers': ['header', 'on_callback', 'groups']}
    >>> c.body.options['link_error'][0].options
-   {'header': 'value', 'on_errback': True, 'groups': [], 'stamped_headers': ['header', 'on_errback', 'groups']}
\ No newline at end of file
+   {'header': 'value', 'on_errback': True, 'groups': [], 'stamped_headers': ['header', 'on_errback', 'groups']}
diff --git a/docs/userguide/workers.rst b/docs/userguide/workers.rst
index 113afc78e07..e96e80e8c7e 100644
--- a/docs/userguide/workers.rst
+++ b/docs/userguide/workers.rst
@@ -489,7 +489,7 @@ and each task that has a stamped header matching the key-value pair(s) will be r
 .. warning::

     This command may perform poorly if your worker pool concurrency is high
-    and terminate is enabled, since it will have to iterate over all the runnig
+    and terminate is enabled, since it will have to iterate over all the running
     tasks to find the ones with the specified stamped header. 
**Example** diff --git a/t/unit/tasks/test_result.py b/t/unit/tasks/test_result.py index 818409c97d9..42eaab8987d 100644 --- a/t/unit/tasks/test_result.py +++ b/t/unit/tasks/test_result.py @@ -550,9 +550,9 @@ def test_add(self): def dummy_copy(self): with patch('celery.result.copy') as copy: - def passt(arg): + def pass_value(arg): return arg - copy.side_effect = passt + copy.side_effect = pass_value yield diff --git a/t/unit/tasks/test_trace.py b/t/unit/tasks/test_trace.py index e7767a979f5..3494b52fdfd 100644 --- a/t/unit/tasks/test_trace.py +++ b/t/unit/tasks/test_trace.py @@ -362,10 +362,10 @@ def test_callbacks__sigs(self, group_, maybe_signature): sig3.apply_async = Mock(name='gapply') request = {'callbacks': [sig1, sig3, sig2], 'root_id': 'root'} - def passt(s, *args, **kwargs): + def pass_value(s, *args, **kwargs): return s - maybe_signature.side_effect = passt + maybe_signature.side_effect = pass_value retval, _ = self.trace(self.add, (2, 2), {}, request=request) group_.assert_called_with((4,), parent_id='id-1', root_id='root', priority=None) sig3.apply_async.assert_called_with( @@ -381,10 +381,10 @@ def test_callbacks__only_groups(self, group_, maybe_signature): sig2.apply_async = Mock(name='gapply') request = {'callbacks': [sig1, sig2], 'root_id': 'root'} - def passt(s, *args, **kwargs): + def pass_value(s, *args, **kwargs): return s - maybe_signature.side_effect = passt + maybe_signature.side_effect = pass_value retval, _ = self.trace(self.add, (2, 2), {}, request=request) sig1.apply_async.assert_called_with( (4,), parent_id='id-1', root_id='root', priority=None diff --git a/t/unit/worker/test_consumer.py b/t/unit/worker/test_consumer.py index 707f6db4302..eb872ab7a62 100644 --- a/t/unit/worker/test_consumer.py +++ b/t/unit/worker/test_consumer.py @@ -70,12 +70,12 @@ def test_gevent_bug_disables_connection_timeout(self): assert self.app.conf.broker_connection_timeout is None def test_limit_moved_to_pool(self): - with patch('celery.worker.consumer.consumer.task_reserved') as reserv: + with patch('celery.worker.consumer.consumer.task_reserved') as task_reserved: c = self.get_consumer() c.on_task_request = Mock(name='on_task_request') request = Mock(name='request') c._limit_move_to_pool(request) - reserv.assert_called_with(request) + task_reserved.assert_called_with(request) c.on_task_request.assert_called_with(request) def test_update_prefetch_count(self): @@ -185,11 +185,11 @@ def test_schedule_bucket_request(self): with patch( 'celery.worker.consumer.consumer.Consumer._limit_move_to_pool' - ) as reserv: + ) as task_reserved: bucket.contents.append((request, 3)) c._schedule_bucket_request(bucket) bucket.can_consume.assert_called_with(3) - reserv.assert_called_with(request) + task_reserved.assert_called_with(request) bucket.can_consume.return_value = False bucket.contents = deque() @@ -218,10 +218,10 @@ def test_limit_task(self): with patch( 'celery.worker.consumer.consumer.Consumer._schedule_bucket_request' - ) as reserv: + ) as task_reserved: c._limit_task(request, bucket, 1) bucket.add.assert_called_with((request, 1)) - reserv.assert_called_with(bucket) + task_reserved.assert_called_with(bucket) def test_post_eta(self): c = self.get_consumer() @@ -231,11 +231,11 @@ def test_post_eta(self): with patch( 'celery.worker.consumer.consumer.Consumer._schedule_bucket_request' - ) as reserv: + ) as task_reserved: c._limit_post_eta(request, bucket, 1) c.qos.decrement_eventually.assert_called_with() bucket.add.assert_called_with((request, 1)) - reserv.assert_called_with(bucket) + 
task_reserved.assert_called_with(bucket) def test_max_restarts_exceeded(self): c = self.get_consumer() diff --git a/t/unit/worker/test_worker.py b/t/unit/worker/test_worker.py index cfa67440b4c..a0fd468e27b 100644 --- a/t/unit/worker/test_worker.py +++ b/t/unit/worker/test_worker.py @@ -220,8 +220,8 @@ def test_receive_message_InvalidTaskError(self, error): Mock(), self.foo_task.name, args=(1, 2), kwargs='foobarbaz', id=1) c.update_strategies() - strat = c.strategies[self.foo_task.name] = Mock(name='strategy') - strat.side_effect = InvalidTaskError() + strategy = c.strategies[self.foo_task.name] = Mock(name='strategy') + strategy.side_effect = InvalidTaskError() callback = self._get_on_message(c) callback(m) From d879a4aa11ef91cbce3fd6629f9e98c3974f55ca Mon Sep 17 00:00:00 2001 From: arnisjuraga Date: Sat, 28 Jan 2023 14:51:40 +0200 Subject: [PATCH 0290/1051] RabbitMQ links update (#8031) Updating outdated RabbitMQ links in basic documentation --- docs/getting-started/backends-and-brokers/rabbitmq.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/getting-started/backends-and-brokers/rabbitmq.rst b/docs/getting-started/backends-and-brokers/rabbitmq.rst index 430844bdfec..d5359843db1 100644 --- a/docs/getting-started/backends-and-brokers/rabbitmq.rst +++ b/docs/getting-started/backends-and-brokers/rabbitmq.rst @@ -31,7 +31,7 @@ Installing the RabbitMQ Server See `Installing RabbitMQ`_ over at RabbitMQ's website. For macOS see `Installing RabbitMQ on macOS`_. -.. _`Installing RabbitMQ`: http://www.rabbitmq.com/install.html +.. _`Installing RabbitMQ`: https://www.rabbitmq.com/download.html .. note:: @@ -69,9 +69,9 @@ Substitute in appropriate values for ``myuser``, ``mypassword`` and ``myvhost`` See the RabbitMQ `Admin Guide`_ for more information about `access control`_. -.. _`Admin Guide`: https://www.rabbitmq.com/admin-guide.html +.. _`Admin Guide`: https://www.rabbitmq.com/admin-guide.html -.. _`access control`: http://www.rabbitmq.com/admin-guide.html#access-control +.. _`access control`: https://www.rabbitmq.com/access-control.html .. _rabbitmq-macOS-installation: From 919a8481243034385e248a2bdb54894631e59eac Mon Sep 17 00:00:00 2001 From: Marcelo Trylesinski Date: Sun, 29 Jan 2023 13:32:02 +0100 Subject: [PATCH 0291/1051] Ignore files generated by tests (#7846) --- .gitignore | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.gitignore b/.gitignore index b821c3f1dd8..d892eca06e5 100644 --- a/.gitignore +++ b/.gitignore @@ -34,3 +34,6 @@ pip-wheel-metadata/ .python-version .vscode/ integration-tests-config.json +[0-9]* +statefilename.* +dump.rdb From f59405a63f1a28a5d205965a792e3a26f15e2923 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sun, 29 Jan 2023 21:19:48 +0200 Subject: [PATCH 0292/1051] Revert "sqlalchemy==1.4.46 (#7995)" (#8033) This reverts commit e54780f8003a16295ff91009f6eb380d8c0e1a06.
--- requirements/extras/sqlalchemy.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/sqlalchemy.txt b/requirements/extras/sqlalchemy.txt index 41f620f3cc8..8e2b106495c 100644 --- a/requirements/extras/sqlalchemy.txt +++ b/requirements/extras/sqlalchemy.txt @@ -1 +1 @@ -sqlalchemy>=1.4.46 +sqlalchemy==1.4.45 From 6cb3d877677c75ea809c1594268c29d7863aa8c6 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 31 Jan 2023 14:06:52 +0600 Subject: [PATCH 0293/1051] [pre-commit.ci] pre-commit autoupdate (#8039) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pycqa/isort: 5.11.4 → 5.12.0](https://github.com/pycqa/isort/compare/5.11.4...5.12.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 80dd8461a19..89da9e05051 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -24,7 +24,7 @@ repos: - id: mixed-line-ending - repo: https://github.com/pycqa/isort - rev: 5.11.4 + rev: 5.12.0 hooks: - id: isort From 7d92046bb5a515d3ee5107218b983b9c13169a2a Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 31 Jan 2023 15:20:35 +0200 Subject: [PATCH 0294/1051] Fixed bug with replacing a stamped task with a chain or a group (inc. links/errlinks) (#8034) * Added unit test: test_replacing_stamped_canvas_with_tasks() * Fixed bug where a replaced task did not pass on its stamps to the replaced task.tasks if it had (e.g group, chain replaced tasks did not have their .tasks stamped) * Fixed missing links stamping when using Task.replace on a stamped sig with stamped links * Update t/unit/tasks/test_canvas.py Co-authored-by: Omer Katz * Added comment in code for the link stamping inside replace() --------- Co-authored-by: Omer Katz --- celery/app/task.py | 22 +++++++ t/unit/tasks/test_canvas.py | 113 ++++++++++++++++++++++++++++++++++++ 2 files changed, 135 insertions(+) diff --git a/celery/app/task.py b/celery/app/task.py index c2d9784da33..1fed26393ce 100644 --- a/celery/app/task.py +++ b/celery/app/task.py @@ -959,6 +959,7 @@ def replace(self, sig): stamped_headers = self.request.stamped_headers.copy() stamps = self.request.stamps.copy() stamped_headers.extend(sig.options.get('stamped_headers', [])) + stamped_headers = list(set(stamped_headers)) stamps.update({ stamp: value for stamp, value in sig.options.items() if stamp in sig.options.get('stamped_headers', []) @@ -966,6 +967,27 @@ def replace(self, sig): sig.options['stamped_headers'] = stamped_headers sig.options.update(stamps) + # Collecting all of the links (callback/errback) to stamp them + links = sig.options['link'] if 'link' in sig.options else [] + links.extend(sig.options['link_error'] if 'link_error' in sig.options else []) + + if hasattr(sig, "tasks"): + tasks = sig.tasks + if isinstance(tasks, group): + tasks = tasks.tasks + for task in tasks: + task.options['stamped_headers'] = stamped_headers + task.options.update(stamps) + links.extend(task.options['link'] if 'link' in task.options else []) + links.extend(task.options['link_error'] if 'link_error' in task.options else []) + + for link in links: + link_stamped_headers = stamped_headers.copy() + link_stamped_headers.extend(link['options'].get('stamped_headers', [])) + link_stamped_headers = list(set(link_stamped_headers)) + 
link['options']['stamped_headers'] = link_stamped_headers + link['options'].update(stamps) + return self.on_replace(sig) def add_to_chord(self, sig, lazy=False): diff --git a/t/unit/tasks/test_canvas.py b/t/unit/tasks/test_canvas.py index 10864d44e31..e0cae2a1b40 100644 --- a/t/unit/tasks/test_canvas.py +++ b/t/unit/tasks/test_canvas.py @@ -456,6 +456,119 @@ def on_replace(self, sig): assert headers['on_errback'] is True assert headers['header'] == 'value' + @pytest.mark.parametrize('sig_to_replace', [ + group(signature(f'sig{i}') for i in range(2)), + group([signature('sig1'), signature('sig2')]), + group((signature('sig1'), signature('sig2'))), + group(signature('sig1'), signature('sig2')), + chain(signature('sig1'), signature('sig2')), + ]) + @pytest.mark.usefixtures('depends_on_current_app') + def test_replacing_stamped_canvas_with_tasks(self, subtests, sig_to_replace): + class CustomStampingVisitor(StampingVisitor): + def on_signature(self, sig, **headers) -> dict: + return {'header': 'value'} + + class MyTask(Task): + def on_replace(self, sig): + nonlocal assertion_result + nonlocal failed_task + tasks = sig.tasks.tasks if isinstance(sig.tasks, group) else sig.tasks + assertion_result = len(tasks) == 2 + for task in tasks: + assertion_result = all([ + assertion_result, + 'header' in task.options['stamped_headers'], + all([header in task.options for header in task.options['stamped_headers']]), + ]) + if not assertion_result: + failed_task = task + break + + return super().on_replace(sig) + + @self.app.task(shared=False, bind=True, base=MyTask) + def replace_from_MyTask(self): + # Allows easy assertion for the test without using Mock + return self.replace(sig_to_replace) + + sig = replace_from_MyTask.s() + sig.stamp(CustomStampingVisitor()) + assertion_result = False + failed_task = None + sig.apply() + assert assertion_result, f"Task {failed_task} was not stamped correctly" + + @pytest.mark.usefixtures('depends_on_current_app') + def test_replacing_stamped_canvas_with_tasks_with_links(self): + class CustomStampingVisitor(StampingVisitor): + def on_signature(self, sig, **headers) -> dict: + return {'header': 'value'} + + class MyTask(Task): + def on_replace(self, sig): + nonlocal assertion_result + nonlocal failed_task + nonlocal failed_task_link + tasks = sig.tasks.tasks if isinstance(sig.tasks, group) else sig.tasks + assertion_result = True + for task in tasks: + links = task.options['link'] + links.extend(task.options['link_error']) + for link in links: + assertion_result = all([ + assertion_result, + all([ + stamped_header in link['options'] + for stamped_header in link['options']['stamped_headers'] + ]), + ]) + else: + if not assertion_result: + failed_task_link = link + break + + assertion_result = all([ + assertion_result, + task.options['stamped_headers']['header'] == 'value', + all([ + header in task.options + for header in task.options['stamped_headers'] + ]), + ]) + + if not assertion_result: + failed_task = task + break + + return super().on_replace(sig) + + @self.app.task(shared=False, bind=True, base=MyTask) + def replace_from_MyTask(self): + # Allows easy assertion for the test without using Mock + return self.replace(sig_to_replace) + + s1 = chain(signature('foo11'), signature('foo12')) + s1.link(signature('link_foo1')) + s1.link_error(signature('link_error_foo1')) + + s2 = chain(signature('foo21'), signature('foo22')) + s2.link(signature('link_foo2')) + s2.link_error(signature('link_error_foo2')) + + sig_to_replace = group([s1, s2]) + sig = replace_from_MyTask.s() + 
sig.stamp(CustomStampingVisitor()) + assertion_result = False + failed_task = None + failed_task_link = None + sig.apply() + + err_msg = f"Task {failed_task} was not stamped correctly" if failed_task else \ + f"Task link {failed_task_link} was not stamped correctly" if failed_task_link else \ + "Assertion failed" + assert assertion_result, err_msg + def test_getitem_property_class(self): assert Signature.task assert Signature.args From 417caa661db7cbaa3c79b85cb96c5f2f7399fccc Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 1 Feb 2023 19:20:18 +0200 Subject: [PATCH 0295/1051] Fixed formatting in setup.cfg that caused flake8 to misbehave (#8044) * Fixed formatting in setup.cfg that caused flake8 to misbehave * Fixed all flake8 errors --- celery/__init__.py | 3 ++- celery/app/base.py | 2 +- celery/backends/base.py | 2 +- celery/bin/shell.py | 4 ++-- celery/concurrency/__init__.py | 2 +- celery/concurrency/asynpool.py | 2 +- celery/contrib/pytest.py | 2 +- celery/contrib/testing/app.py | 2 +- celery/contrib/testing/manager.py | 4 ++-- celery/contrib/testing/mocks.py | 6 +++--- celery/contrib/testing/worker.py | 6 +++--- celery/events/state.py | 2 +- celery/security/__init__.py | 2 +- celery/utils/collections.py | 2 +- celery/utils/log.py | 2 +- celery/utils/saferepr.py | 2 +- celery/utils/text.py | 2 +- celery/worker/consumer/consumer.py | 6 +++--- examples/celery_http_gateway/urls.py | 2 +- examples/django/proj/urls.py | 2 +- examples/stamping/myapp.py | 2 +- setup.cfg | 27 ++++++++++++++++++--------- t/integration/test_canvas.py | 2 +- t/unit/app/test_beat.py | 4 ++-- t/unit/backends/test_database.py | 8 ++++---- t/unit/bin/proj/app2.py | 2 +- t/unit/concurrency/test_eventlet.py | 6 +++--- t/unit/contrib/proj/foo.py | 2 +- t/unit/contrib/test_sphinx.py | 2 +- t/unit/contrib/test_worker.py | 2 +- t/unit/tasks/test_canvas.py | 2 +- t/unit/utils/test_functional.py | 2 +- 32 files changed, 64 insertions(+), 54 deletions(-) diff --git a/celery/__init__.py b/celery/__init__.py index aa64b596f0a..7c2de763898 100644 --- a/celery/__init__.py +++ b/celery/__init__.py @@ -70,7 +70,8 @@ def debug_import(name, locals=None, globals=None, from celery.app.base import Celery from celery.app.task import Task from celery.app.utils import bugreport - from celery.canvas import chain, chord, chunks, group, maybe_signature, signature, subtask, xmap, xstarmap + from celery.canvas import (chain, chord, chunks, group, maybe_signature, signature, subtask, xmap, # noqa + xstarmap) from celery.utils import uuid # Eventlet/gevent patching must happen before importing diff --git a/celery/app/base.py b/celery/app/base.py index 73ddf4e0f7d..8281f5510b9 100644 --- a/celery/app/base.py +++ b/celery/app/base.py @@ -33,7 +33,7 @@ from celery.utils.time import maybe_make_aware, timezone, to_utc # Load all builtin tasks -from . import backends, builtins +from . 
import backends, builtins # noqa from .annotations import prepare as prepare_annotations from .autoretry import add_autoretry_behaviour from .defaults import DEFAULT_SECURITY_DIGEST, find_deprecated_settings diff --git a/celery/backends/base.py b/celery/backends/base.py index a8bf01a5929..22710cb3c56 100644 --- a/celery/backends/base.py +++ b/celery/backends/base.py @@ -397,7 +397,7 @@ def exception_to_python(self, exc): exc = cls(*exc_msg) else: exc = cls(exc_msg) - except Exception as err: + except Exception as err: # noqa exc = Exception(f'{cls}({exc_msg})') return exc diff --git a/celery/bin/shell.py b/celery/bin/shell.py index 840bcc3c52f..77b14d8a307 100644 --- a/celery/bin/shell.py +++ b/celery/bin/shell.py @@ -67,10 +67,10 @@ def _no_ipython(self): # pragma: no cover def _invoke_default_shell(locals): try: - import IPython + import IPython # noqa except ImportError: try: - import bpython + import bpython # noqa except ImportError: _invoke_fallback_shell(locals) else: diff --git a/celery/concurrency/__init__.py b/celery/concurrency/__init__.py index 54eabfa2543..4953f463f01 100644 --- a/celery/concurrency/__init__.py +++ b/celery/concurrency/__init__.py @@ -17,7 +17,7 @@ } try: - import concurrent.futures + import concurrent.futures # noqa except ImportError: pass else: diff --git a/celery/concurrency/asynpool.py b/celery/concurrency/asynpool.py index b735e7b1014..19715005828 100644 --- a/celery/concurrency/asynpool.py +++ b/celery/concurrency/asynpool.py @@ -57,7 +57,7 @@ def __read__(fd, buf, size, read=os.read): return n readcanbuf = False - def unpack_from(fmt, iobuf, unpack=unpack): + def unpack_from(fmt, iobuf, unpack=unpack): # noqa return unpack(fmt, iobuf.getvalue()) # <-- BytesIO __all__ = ('AsynPool',) diff --git a/celery/contrib/pytest.py b/celery/contrib/pytest.py index fae69fc5368..d1f8279f9b0 100644 --- a/celery/contrib/pytest.py +++ b/celery/contrib/pytest.py @@ -1,7 +1,7 @@ """Fixtures and testing utilities for :pypi:`pytest `.""" import os from contextlib import contextmanager -from typing import TYPE_CHECKING, Any, Mapping, Sequence, Union +from typing import TYPE_CHECKING, Any, Mapping, Sequence, Union # noqa import pytest diff --git a/celery/contrib/testing/app.py b/celery/contrib/testing/app.py index b8bd9f0d77a..95ed700b8ec 100644 --- a/celery/contrib/testing/app.py +++ b/celery/contrib/testing/app.py @@ -47,7 +47,7 @@ def __init__(self, *args, **kwargs): def TestApp(name=None, config=None, enable_logging=False, set_as_current=False, log=UnitLogging, backend=None, broker=None, **kwargs): """App used for testing.""" - from . import tasks + from . 
import tasks # noqa config = dict(deepcopy(DEFAULT_TEST_CONFIG), **config or {}) if broker is not None: config.pop('broker_url', None) diff --git a/celery/contrib/testing/manager.py b/celery/contrib/testing/manager.py index 69b7e287615..28f05716079 100644 --- a/celery/contrib/testing/manager.py +++ b/celery/contrib/testing/manager.py @@ -4,13 +4,13 @@ from collections import defaultdict from functools import partial from itertools import count -from typing import Any, Callable, Dict, Sequence, TextIO, Tuple +from typing import Any, Callable, Dict, Sequence, TextIO, Tuple # noqa from kombu.utils.functional import retry_over_time from celery import states from celery.exceptions import TimeoutError -from celery.result import AsyncResult, ResultSet +from celery.result import AsyncResult, ResultSet # noqa from celery.utils.text import truncate from celery.utils.time import humanize_seconds as _humanize_seconds diff --git a/celery/contrib/testing/mocks.py b/celery/contrib/testing/mocks.py index a7c00d4d033..4ec79145527 100644 --- a/celery/contrib/testing/mocks.py +++ b/celery/contrib/testing/mocks.py @@ -1,11 +1,11 @@ """Useful mocks for unit testing.""" import numbers from datetime import datetime, timedelta -from typing import Any, Mapping, Sequence +from typing import Any, Mapping, Sequence # noqa from unittest.mock import Mock -from celery import Celery -from celery.canvas import Signature +from celery import Celery # noqa +from celery.canvas import Signature # noqa def TaskMessage( diff --git a/celery/contrib/testing/worker.py b/celery/contrib/testing/worker.py index bf24b30b1c8..d01e82c6bfc 100644 --- a/celery/contrib/testing/worker.py +++ b/celery/contrib/testing/worker.py @@ -2,10 +2,10 @@ import os import threading from contextlib import contextmanager -from typing import Any, Iterable, Union +from typing import Any, Iterable, Union # noqa -import celery.worker.consumer -from celery import Celery, worker +import celery.worker.consumer # noqa +from celery import Celery, worker # noqa from celery.result import _set_task_join_will_block, allow_join_result from celery.utils.dispatch import Signal from celery.utils.nodenames import anon_nodename diff --git a/celery/events/state.py b/celery/events/state.py index f6fc2a59d4f..3449991354a 100644 --- a/celery/events/state.py +++ b/celery/events/state.py @@ -22,7 +22,7 @@ from itertools import islice from operator import itemgetter from time import time -from typing import Mapping, Optional +from typing import Mapping, Optional # noqa from weakref import WeakSet, ref from kombu.clocks import timetuple diff --git a/celery/security/__init__.py b/celery/security/__init__.py index cea3c2ff78f..c801d98b1df 100644 --- a/celery/security/__init__.py +++ b/celery/security/__init__.py @@ -36,7 +36,7 @@ __all__ = ('setup_security',) try: - import cryptography + import cryptography # noqa except ImportError: raise ImproperlyConfigured(CRYPTOGRAPHY_NOT_INSTALLED) diff --git a/celery/utils/collections.py b/celery/utils/collections.py index dc32404c0f4..d03e0169a83 100644 --- a/celery/utils/collections.py +++ b/celery/utils/collections.py @@ -6,7 +6,7 @@ from heapq import heapify, heappop, heappush from itertools import chain, count from queue import Empty -from typing import Any, Dict, Iterable, List +from typing import Any, Dict, Iterable, List # noqa from .functional import first, uniq from .text import match_case diff --git a/celery/utils/log.py b/celery/utils/log.py index 1765a611f45..4e8fc11ff72 100644 --- a/celery/utils/log.py +++ b/celery/utils/log.py 
@@ -6,7 +6,7 @@ import threading import traceback from contextlib import contextmanager -from typing import AnyStr, Sequence +from typing import AnyStr, Sequence # noqa from kombu.log import LOG_LEVELS from kombu.log import get_logger as _get_logger diff --git a/celery/utils/saferepr.py b/celery/utils/saferepr.py index de8d15a9b71..feddd41f0fd 100644 --- a/celery/utils/saferepr.py +++ b/celery/utils/saferepr.py @@ -15,7 +15,7 @@ from itertools import chain from numbers import Number from pprint import _recursion -from typing import Any, AnyStr, Callable, Dict, Iterator, List, Sequence, Set, Tuple +from typing import Any, AnyStr, Callable, Dict, Iterator, List, Sequence, Set, Tuple # noqa from .text import truncate diff --git a/celery/utils/text.py b/celery/utils/text.py index 8f4a321eebb..f7b7571d57b 100644 --- a/celery/utils/text.py +++ b/celery/utils/text.py @@ -5,7 +5,7 @@ from functools import partial from pprint import pformat from textwrap import fill -from typing import Any, List, Mapping, Pattern +from typing import Any, List, Mapping, Pattern # noqa __all__ = ( 'abbr', 'abbrtask', 'dedent', 'dedent_initial', diff --git a/celery/worker/consumer/consumer.py b/celery/worker/consumer/consumer.py index d70dc179c78..c10c9aeb578 100644 --- a/celery/worker/consumer/consumer.py +++ b/celery/worker/consumer/consumer.py @@ -121,9 +121,9 @@ CANCEL_TASKS_BY_DEFAULT = """ In Celery 5.1 we introduced an optional breaking change which on connection loss cancels all currently executed tasks with late acknowledgement enabled. -These tasks cannot be acknowledged as the connection is gone, and the tasks are automatically redelivered back to the queue. -You can enable this behavior using the worker_cancel_long_running_tasks_on_connection_loss setting. -In Celery 5.1 it is set to False by default. The setting will be set to True by default in Celery 6.0. +These tasks cannot be acknowledged as the connection is gone, and the tasks are automatically redelivered +back to the queue. You can enable this behavior using the worker_cancel_long_running_tasks_on_connection_loss +setting. In Celery 5.1 it is set to False by default. The setting will be set to True by default in Celery 6.0. 
""" diff --git a/examples/celery_http_gateway/urls.py b/examples/celery_http_gateway/urls.py index 7b74284c137..802ff2344b2 100644 --- a/examples/celery_http_gateway/urls.py +++ b/examples/celery_http_gateway/urls.py @@ -1,5 +1,5 @@ from celery_http_gateway.tasks import hello_world -from django.conf.urls.defaults import handler404, handler500, include, patterns, url +from django.conf.urls.defaults import handler404, handler500, include, patterns, url # noqa from djcelery import views as celery_views # Uncomment the next two lines to enable the admin: diff --git a/examples/django/proj/urls.py b/examples/django/proj/urls.py index 74415c35830..5f67c27b660 100644 --- a/examples/django/proj/urls.py +++ b/examples/django/proj/urls.py @@ -1,4 +1,4 @@ -from django.urls import handler404, handler500, include, url +from django.urls import handler404, handler500, include, url # noqa # Uncomment the next two lines to enable the admin: # from django.contrib import admin diff --git a/examples/stamping/myapp.py b/examples/stamping/myapp.py index 92e68b2cb45..833939f7359 100644 --- a/examples/stamping/myapp.py +++ b/examples/stamping/myapp.py @@ -30,7 +30,7 @@ import json # Import tasks in worker context -import tasks +import tasks # noqa from config import app from celery.signals import task_received diff --git a/setup.cfg b/setup.cfg index 465f266dba7..cd04c7a134b 100644 --- a/setup.cfg +++ b/setup.cfg @@ -8,15 +8,24 @@ all_files = 1 # whenever it makes the code more readable. max-line-length = 117 extend-ignore = - E203, # incompatible with black https://github.com/psf/black/issues/315#issuecomment-395457972 - D102, # Missing docstring in public method - D104, # Missing docstring in public package - D105, # Missing docstring in magic method - D107, # Missing docstring in __init__ - D401, # First line should be in imperative mood; try rephrasing - D412, # No blank lines allowed between a section header and its content - E741, # ambiguous variable name '...' - E742, # ambiguous class definition '...' + # incompatible with black https://github.com/psf/black/issues/315#issuecomment-395457972 + E203, + # Missing docstring in public method + D102, + # Missing docstring in public package + D104, + # Missing docstring in magic method + D105, + # Missing docstring in __init__ + D107, + # First line should be in imperative mood; try rephrasing + D401, + # No blank lines allowed between a section header and its content + D412, + # ambiguous variable name '...' + E741, + # ambiguous class definition '...' 
+ E742, per-file-ignores = t/*,setup.py,examples/*,docs/*,extra/*: # docstrings diff --git a/t/integration/test_canvas.py b/t/integration/test_canvas.py index 3e82efccc98..4d91accb3d0 100644 --- a/t/integration/test_canvas.py +++ b/t/integration/test_canvas.py @@ -6,7 +6,7 @@ from time import monotonic, sleep import pytest -import pytest_subtests +import pytest_subtests # noqa from celery import chain, chord, group, signature from celery.backends.base import BaseKeyValueStoreBackend diff --git a/t/unit/app/test_beat.py b/t/unit/app/test_beat.py index 84f36d04f86..dd24ecc9708 100644 --- a/t/unit/app/test_beat.py +++ b/t/unit/app/test_beat.py @@ -99,9 +99,9 @@ def test_lt(self): e1 = self.create_entry(schedule=timedelta(seconds=10)) e2 = self.create_entry(schedule=timedelta(seconds=2)) # order doesn't matter, see comment in __lt__ - res1 = e1 < e2 + res1 = e1 < e2 # noqa try: - res2 = e1 < object() + res2 = e1 < object() # noqa except TypeError: pass diff --git a/t/unit/backends/test_database.py b/t/unit/backends/test_database.py index d6b03145056..511298f9a1b 100644 --- a/t/unit/backends/test_database.py +++ b/t/unit/backends/test_database.py @@ -10,10 +10,10 @@ pytest.importorskip('sqlalchemy') -from celery.backends.database import DatabaseBackend, retry, session, session_cleanup -from celery.backends.database.models import Task, TaskSet -from celery.backends.database.session import PREPARE_MODELS_MAX_RETRIES, ResultModelBase, SessionManager -from t import skip +from celery.backends.database import DatabaseBackend, retry, session, session_cleanup # noqa +from celery.backends.database.models import Task, TaskSet # noqa +from celery.backends.database.session import PREPARE_MODELS_MAX_RETRIES, ResultModelBase, SessionManager # noqa +from t import skip # noqa class SomeClass: diff --git a/t/unit/bin/proj/app2.py b/t/unit/bin/proj/app2.py index 3eb4a20a0eb..c7572987668 100644 --- a/t/unit/bin/proj/app2.py +++ b/t/unit/bin/proj/app2.py @@ -1 +1 @@ -import celery +import celery # noqa diff --git a/t/unit/concurrency/test_eventlet.py b/t/unit/concurrency/test_eventlet.py index 30b57dae0b1..a044d4ae67a 100644 --- a/t/unit/concurrency/test_eventlet.py +++ b/t/unit/concurrency/test_eventlet.py @@ -5,10 +5,10 @@ pytest.importorskip('eventlet') -from greenlet import GreenletExit +from greenlet import GreenletExit # noqa -import t.skip -from celery.concurrency.eventlet import TaskPool, Timer, apply_target +import t.skip # noqa +from celery.concurrency.eventlet import TaskPool, Timer, apply_target # noqa eventlet_modules = ( 'eventlet', diff --git a/t/unit/contrib/proj/foo.py b/t/unit/contrib/proj/foo.py index 07a628b781c..b6e3d656110 100644 --- a/t/unit/contrib/proj/foo.py +++ b/t/unit/contrib/proj/foo.py @@ -1,4 +1,4 @@ -from xyzzy import plugh +from xyzzy import plugh # noqa from celery import Celery, shared_task diff --git a/t/unit/contrib/test_sphinx.py b/t/unit/contrib/test_sphinx.py index 0b2bad28509..0a5abceab91 100644 --- a/t/unit/contrib/test_sphinx.py +++ b/t/unit/contrib/test_sphinx.py @@ -3,7 +3,7 @@ import pytest try: - from sphinx.application import Sphinx + from sphinx.application import Sphinx # noqa from sphinx_testing import TestApp sphinx_installed = True except ImportError: diff --git a/t/unit/contrib/test_worker.py b/t/unit/contrib/test_worker.py index 17cf005f175..c729f644264 100644 --- a/t/unit/contrib/test_worker.py +++ b/t/unit/contrib/test_worker.py @@ -2,7 +2,7 @@ # this import adds a @shared_task, which uses connect_on_app_finalize # to install the celery.ping task that the 
test lib uses -import celery.contrib.testing.tasks +import celery.contrib.testing.tasks # noqa from celery import Celery from celery.contrib.testing.worker import start_worker diff --git a/t/unit/tasks/test_canvas.py b/t/unit/tasks/test_canvas.py index e0cae2a1b40..6a46dd994d0 100644 --- a/t/unit/tasks/test_canvas.py +++ b/t/unit/tasks/test_canvas.py @@ -3,7 +3,7 @@ from unittest.mock import ANY, MagicMock, Mock, call, patch, sentinel import pytest -import pytest_subtests +import pytest_subtests # noqa from celery import Task from celery._state import _task_stack diff --git a/t/unit/utils/test_functional.py b/t/unit/utils/test_functional.py index 9b9ec087e06..52fdce6a96a 100644 --- a/t/unit/utils/test_functional.py +++ b/t/unit/utils/test_functional.py @@ -1,7 +1,7 @@ import collections import pytest -import pytest_subtests +import pytest_subtests # noqa from kombu.utils.functional import lazy from celery.utils.functional import (DummyContext, first, firstmethod, fun_accepts_kwargs, fun_takes_argument, From 5a2c941aeb6f445edaaafd0abcefe8ce7b74d941 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 1 Feb 2023 20:37:18 +0200 Subject: [PATCH 0296/1051] Removed duplicated import Iterable (#8046) --- t/unit/tasks/test_canvas.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/t/unit/tasks/test_canvas.py b/t/unit/tasks/test_canvas.py index 6a46dd994d0..a22a4ed1ced 100644 --- a/t/unit/tasks/test_canvas.py +++ b/t/unit/tasks/test_canvas.py @@ -1,5 +1,6 @@ import json import math +from collections.abc import Iterable from unittest.mock import ANY, MagicMock, Mock, call, patch, sentinel import pytest @@ -20,10 +21,6 @@ 'options': {'task_id': 'TASK_ID'}, 'subtask_type': ''}, ) -try: - from collections.abc import Iterable -except ImportError: - from collections.abc import Iterable def return_True(*args, **kwargs): From cb83eafe59782963cb0254edce23f485f03bfbf1 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 1 Feb 2023 23:18:16 +0200 Subject: [PATCH 0297/1051] Limited Sphinx version to <6.0.0 to fix the current doc build issues (#8047) --- requirements/docs.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/docs.txt b/requirements/docs.txt index d4704e0364e..dc9fc872228 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -1,5 +1,5 @@ sphinx_celery~=2.0.0 -Sphinx>=3.0.0 +Sphinx>=3.0.0,<6.0.0 sphinx-testing~=1.0.1 sphinx-click==4.4.0 -r extras/sqlalchemy.txt From 6b09f93fb62cb3a9f7446fc2de0854226d4e98c4 Mon Sep 17 00:00:00 2001 From: AJ Jordan Date: Mon, 6 Feb 2023 19:48:35 -0500 Subject: [PATCH 0298/1051] Document --logfile default --- celery/bin/base.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/celery/bin/base.py b/celery/bin/base.py index c41b6f97005..63a2895758b 100644 --- a/celery/bin/base.py +++ b/celery/bin/base.py @@ -176,7 +176,8 @@ class CeleryDaemonCommand(CeleryCommand): def __init__(self, *args, **kwargs): """Initialize a Celery command with common daemon options.""" super().__init__(*args, **kwargs) - self.params.append(CeleryOption(('-f', '--logfile'), help_group="Daemonization Options")) + self.params.append(CeleryOption(('-f', '--logfile'), help_group="Daemonization Options", + help="Log destination; defaults to stderr")) self.params.append(CeleryOption(('--pidfile',), help_group="Daemonization Options")) self.params.append(CeleryOption(('--uid',), help_group="Daemonization Options")) self.params.append(CeleryOption(('--gid',), help_group="Daemonization Options")) From 
8000bb1bd4f5a0d93bb32224c69264a1acccc1a2 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 8 Feb 2023 17:24:56 +0200 Subject: [PATCH 0299/1051] Stamping Mechanism Refactoring (#8045) * Removed all stamping tests from t/unit/tasks/test_canvas.py * Added t/unit/tasks/test_stamping.py * Removed all stamping tests from t/integration/test_canvas.py * Added t/integration/test_stamping.py * Removed GroupStampingVisitor * Fixed bug in _chord.run() where missing "stamped_headers" key in options would cause an exception * Fixed bug in Signature._merge() where missing "stamped_headers" key in options would cause an exception * Stabilized unit tests * Applied black formatting on t/unit/tasks/test_stamping.py * Flake8 error fix * Stabilized integration tests * Applied black formatting on t/integration/test_stamping.py * Fixed test_callback_stamping_link_after_stamp() + other small fixes and cleanups * Applied black formatting on t/unit/tasks/test_stamping.py * Removed useless @pytest.mark.usefixtures("depends_on_current_app") from test_stamping unit tests * Added test_canvas_stamping to unit tests * Added more signature variations to test_canvas_stamping * Refactored test_canvas_stamping (unit tests) to simplify making new tests * Fixed bug in Signature.stamp() raising exception if on_signature() returned None * Fixed bug in _chord.stamp() where the header was not stamped if it was a group object * Added (finished) unit test: test_stamp_in_options * Fixed bug in group/_chord/_chain.stamp() where options["stamped_headers"] was not updated with all stamps from visitor * Added (finished) unit test: test_stamping_headers_in_options * Huge refactoring of the stamping mechanism * Added tests for canvas workflow with links and link errors to test_canvas_stamping * Cleanup * Added unit test: test_stamping_with_replace() * Moved Task.replace's stamping handling to Task.on_replace_stamping + fixed it * Bugfixes to stamping in canvas.py (found via test_stamping_with_replace()) * Fixed test_stamping_with_replace and added type annotations * Fixed bug where stamping without a visitor would lose sync between the stamps themselves and the stamped_headers list * More bug fixes and tests impl optimizations * Removed stamping of group, chain and chord signatures themselves, only their inner tasks will be stamped * Simplified assertion visitors implementation * Added more canvas variations to test_canvas_stamping * Fixed StampsAssersionVisitor * Refactored stamp_links based on all recent changes to the stamping mechanism * Refactored test_canvas_stamping to allow adding custom testing visitors easily * Cleanup * Added ListStampingVisitor to t/unit/tasks/test_stamping.py * Added SetStampingVisitor to t/unit/tasks/test_stamping.py * Optimized test cases in test_canvas_stamping * Optimized test cases in test_canvas_stamping * Optimized test cases in test_canvas_stamping * Added UUIDStampingVisitor to t/unit/tasks/test_stamping.py * Optimized test cases in test_canvas_stamping * Added StringStampingVisitor to t/unit/tasks/test_stamping.py * Optimized test cases in test_canvas_stamping * Improved assertion + Cleanup + English typo fix * Applied black formatting on t/integration/test_stamping.py * Update celery/canvas.py Co-authored-by: Omer Katz * Optimized _stamp_headers() * pre-commit fix * Reduced use of deepcopy where .copy() is enough * Refactored Task.on_replace() to allow using an external visitor when replacing a task during stamping * Added doc to code * Simplified _stamp_headers() impl * Fixed: added line #LXXX 
was not covered by tests * Improved debugging with stamping tests * Improved CleanupVisitor * Added more test cases * Moved back stamping integration tests to test_canvas.py as it was originally * Added integration test: test_stamping_workflow() * Added unit test: test_callback_stamping_link_multiple_visitors() * Removed hack from development, now that the feature works as expected * LinkingVisitor will now link a more complicated canvas in the stamping unit tests * Fixed bug in stamped_linked_canvas fixture where all of the links were removed before the test --------- Co-authored-by: Omer Katz --- celery/app/task.py | 74 +- celery/canvas.py | 225 +++--- t/integration/test_canvas.py | 396 +++++------ t/unit/tasks/test_canvas.py | 923 +------------------------ t/unit/tasks/test_chord.py | 6 +- t/unit/tasks/test_stamping.py | 1229 +++++++++++++++++++++++++++++++++ 6 files changed, 1566 insertions(+), 1287 deletions(-) create mode 100644 t/unit/tasks/test_stamping.py diff --git a/celery/app/task.py b/celery/app/task.py index 1fed26393ce..d77952f6674 100644 --- a/celery/app/task.py +++ b/celery/app/task.py @@ -8,7 +8,7 @@ from celery import current_app, states from celery._state import _task_stack -from celery.canvas import GroupStampingVisitor, _chain, group, signature +from celery.canvas import _chain, group, signature from celery.exceptions import Ignore, ImproperlyConfigured, MaxRetriesExceededError, Reject, Retry from celery.local import class_property from celery.result import EagerResult, denied_join_result @@ -896,7 +896,7 @@ def send_event(self, type_, retry=True, retry_policy=None, **fields): type_, uuid=req.id, retry=retry, retry_policy=retry_policy, **fields) - def replace(self, sig): + def replace(self, sig, visitor=None): """Replace this task, with a new task inheriting the task id. Execution of the host task ends immediately and no subsequent statements @@ -904,8 +904,14 @@ def replace(self, sig): .. versionadded:: 4.0 + .. versionchanged:: 5.3 + Added new ``visitor`` argument, which is used when the task is + replaced to stamp the replaced task with the visitor's stamps. + In addition, any previous stamps will be passed to the replaced task. + Arguments: sig (Signature): signature to replace with. + visitor (StampingVisitor): Visitor API object. Raises: ~@Ignore: This is always raised when called in asynchronous context. 
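Given the new ``visitor`` parameter documented above, the intended calling pattern looks roughly like the following sketch. The task names and the visitor class are invented for illustration; only ``Task.replace(sig, visitor=...)`` and the ``StampingVisitor`` base class come from this changeset:

.. code-block:: python

    from celery import Celery
    from celery.canvas import StampingVisitor

    app = Celery('hypothetical_app')

    class AuditVisitor(StampingVisitor):
        # Stamp every signature this visitor touches.
        def on_signature(self, sig, **headers) -> dict:
            return {'audit': 'replaced'}

    @app.task
    def process(x):
        return x * 2

    @app.task(bind=True)
    def dispatch(self, x):
        # Execution of dispatch ends here; process.s(x) inherits the
        # task id and the original request's stamps, and AuditVisitor
        # adds its own stamp on top through the new visitor argument.
        return self.replace(process.s(x), visitor=AuditVisitor())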
@@ -953,41 +959,7 @@ def replace(self, sig): for t in reversed(self.request.chain or []): sig |= signature(t, app=self.app) # Stamping sig with parents groups - if self.request.stamps: - groups = self.request.stamps.get("groups") - sig.stamp(visitor=GroupStampingVisitor(groups=groups, stamped_headers=self.request.stamped_headers)) - stamped_headers = self.request.stamped_headers.copy() - stamps = self.request.stamps.copy() - stamped_headers.extend(sig.options.get('stamped_headers', [])) - stamped_headers = list(set(stamped_headers)) - stamps.update({ - stamp: value - for stamp, value in sig.options.items() if stamp in sig.options.get('stamped_headers', []) - }) - sig.options['stamped_headers'] = stamped_headers - sig.options.update(stamps) - - # Collecting all of the links (callback/errback) to stamp them - links = sig.options['link'] if 'link' in sig.options else [] - links.extend(sig.options['link_error'] if 'link_error' in sig.options else []) - - if hasattr(sig, "tasks"): - tasks = sig.tasks - if isinstance(tasks, group): - tasks = tasks.tasks - for task in tasks: - task.options['stamped_headers'] = stamped_headers - task.options.update(stamps) - links.extend(task.options['link'] if 'link' in task.options else []) - links.extend(task.options['link_error'] if 'link_error' in task.options else []) - - for link in links: - link_stamped_headers = stamped_headers.copy() - link_stamped_headers.extend(link['options'].get('stamped_headers', [])) - link_stamped_headers = list(set(link_stamped_headers)) - link['options']['stamped_headers'] = link_stamped_headers - link['options'].update(stamps) - + self.on_stamp_replaced(sig, visitor) return self.on_replace(sig) def add_to_chord(self, sig, lazy=False): @@ -1104,6 +1076,34 @@ def after_return(self, status, retval, task_id, args, kwargs, einfo): None: The return value of this handler is ignored. """ + def on_stamp_replaced(self, sig, visitor=None): + """Handler called when the task is replaced and passes + the stamps from the original task to the replaced task. + + .. versionadded:: 5.3 + + Arguments: + sig (Signature): signature to replace with. + visitor (StampingVisitor): Visitor API object. + """ + stamps = {} + + # If the original task had stamps + if self.request.stamps: + # Copy the stamps to the new signature + stamps = self.request.stamps.copy() + for header, stamp in stamps.items(): + # The request will contain single stamps as a list of one element so we need to unpack them to + # keep consistency with stamping with a header of a single stamp (which will not be a list + # implicitly like in the request) + # This will also flat stamps that were originally a list of a single stamp to create consistency + # with stamping a single header stamp to always be a flattened + stamp = stamp[0] if len(stamp) == 1 else stamp + stamps[header] = stamp + + if visitor: # This check avoids infinite recursion when the visitor is None + sig.stamp(visitor=visitor, **stamps) + def on_replace(self, sig): """Handler called when the task is replaced. diff --git a/celery/canvas.py b/celery/canvas.py index 8c09a9c5b90..900a46b8518 100644 --- a/celery/canvas.py +++ b/celery/canvas.py @@ -70,7 +70,7 @@ def _stamp_regen_task(task, visitor, **headers): return task -def _merge_dictionaries(d1, d2): +def _merge_dictionaries(d1, d2, aggregate_duplicates=True): """Merge two dictionaries recursively into the first one. Example: @@ -84,17 +84,27 @@ def _merge_dictionaries(d1, d2): 'tuple': (1, 2), 'set': {'a', 'b'} } + + Arguments: + d1 (dict): Dictionary to merge into. 
+ d2 (dict): Dictionary to merge from. + aggregate_duplicates (bool): + If True, aggregate duplicated items (by key) into a list of all values in d1 in the same key. + If False, duplicate keys will be taken from d2 and override the value in d1. """ + if not d2: + return + for key, value in d1.items(): if key in d2: if isinstance(value, dict): _merge_dictionaries(d1[key], d2[key]) else: if isinstance(value, (int, float, str)): - d1[key] = [value] - if isinstance(d2[key], list) and d1[key] is not None: + d1[key] = [value] if aggregate_duplicates else value + if isinstance(d2[key], list) and isinstance(d1[key], list): d1[key].extend(d2[key]) - else: + elif aggregate_duplicates: if d1[key] is None: d1[key] = [] else: @@ -162,33 +172,33 @@ def on_signature(self, sig, **headers) -> dict: Dict: headers to update. """ - def on_chord_header_start(self, chord, **header) -> dict: + def on_chord_header_start(self, sig, **header) -> dict: """Method that is called on сhord header stamping start. Arguments: - chord (chord): chord that is stamped. + sig (chord): chord that is stamped. headers (Dict): Partial headers that could be merged with existing headers. Returns: Dict: headers to update. """ - if not isinstance(chord.tasks, group): - chord.tasks = group(chord.tasks) - return self.on_group_start(chord.tasks, **header) + if not isinstance(sig.tasks, group): + sig.tasks = group(sig.tasks) + return self.on_group_start(sig.tasks, **header) - def on_chord_header_end(self, chord, **header) -> None: + def on_chord_header_end(self, sig, **header) -> None: """Method that is called on сhord header stamping end. Arguments: - chord (chord): chord that is stamped. + sig (chord): chord that is stamped. headers (Dict): Partial headers that could be merged with existing headers. """ - self.on_group_end(chord.tasks, **header) + self.on_group_end(sig.tasks, **header) - def on_chord_body(self, chord, **header) -> dict: + def on_chord_body(self, sig, **header) -> dict: """Method that is called on chord body stamping. Arguments: - chord (chord): chord that is stamped. + sig (chord): chord that is stamped. headers (Dict): Partial headers that could be merged with existing headers. Returns: Dict: headers to update. @@ -218,32 +228,6 @@ def on_errback(self, errback, **header) -> dict: return {} -class GroupStampingVisitor(StampingVisitor): - """ - Group stamping implementation based on Stamping API. - """ - - def __init__(self, groups=None, stamped_headers=None): - self.groups = groups or [] - self.stamped_headers = stamped_headers or [] - if "groups" not in self.stamped_headers: - self.stamped_headers.append("groups") - - def on_group_start(self, group, **headers) -> dict: - if group.id is None: - group.set(task_id=uuid()) - - if group.id not in self.groups: - self.groups.append(group.id) - return super().on_group_start(group, **headers) - - def on_group_end(self, group, **headers) -> None: - self.groups.pop() - - def on_signature(self, sig, **headers) -> dict: - return {'groups': list(self.groups), "stamped_headers": list(self.stamped_headers)} - - @abstract.CallableSignature.register class Signature(dict): """Task Signature. 
@@ -376,9 +360,6 @@ def apply(self, args=None, kwargs=None, **options): """ args = args if args else () kwargs = kwargs if kwargs else {} - groups = self.options.get("groups") - stamped_headers = self.options.get("stamped_headers") - self.stamp(visitor=GroupStampingVisitor(groups=groups, stamped_headers=stamped_headers)) # Extra options set to None are dismissed options = {k: v for k, v in options.items() if v is not None} # For callbacks: extra args are prepended to the stored args. @@ -402,9 +383,6 @@ def apply_async(self, args=None, kwargs=None, route_name=None, **options): """ args = args if args else () kwargs = kwargs if kwargs else {} - groups = self.options.get("groups") - stamped_headers = self.options.get("stamped_headers") - self.stamp(visitor=GroupStampingVisitor(groups=groups, stamped_headers=stamped_headers)) # Extra options set to None are dismissed options = {k: v for k, v in options.items() if v is not None} try: @@ -449,7 +427,7 @@ def _merge(self, args=None, kwargs=None, options=None, force=False): # implying that allowing their value to change would stall tasks immutable_options = self._IMMUTABLE_OPTIONS if "stamped_headers" in self.options: - immutable_options = self._IMMUTABLE_OPTIONS.union(set(self.options["stamped_headers"])) + immutable_options = self._IMMUTABLE_OPTIONS.union(set(self.options.get("stamped_headers", []))) # merge self.options with options without overriding stamped headers from self.options new_options = {**self.options, **{ k: v for k, v in options.items() @@ -577,27 +555,73 @@ def stamp(self, visitor=None, **headers): Using a visitor will pass on responsibility for the stamping to the visitor. + .. versionadded:: 5.3 + Arguments: visitor (StampingVisitor): Visitor API object. headers (Dict): Stamps that should be added to headers. """ self.stamp_links(visitor, **headers) - headers = headers.copy() + visitor_headers = None if visitor is not None: - visitor_headers = visitor.on_signature(self, **headers) + visitor_headers = visitor.on_signature(self, **headers) or {} + headers = self._stamp_headers(visitor_headers, **headers) + return self.set(**headers) + + def _stamp_headers(self, visitor_headers=None, **headers): + """ Collect all stamps from visitor, headers and self, + and return an idempotent dictionary of stamps. + + .. versionadded:: 5.3 + + Arguments: + visitor_headers (Dict): Stamps from a visitor method. + headers (Dict): Stamps that should be added to headers. + + Returns: + Dict: Merged stamps. + """ + # Use aggregate_duplicates=False to prioritize visitor_headers over headers in case of duplicated stamps. + # This will lose duplicated headers from the headers argument, but that is the best effort solution + # to avoid implicitly casting the duplicated stamp into a list of both stamps from headers and + # visitor_headers of the same key. 
+ # Example: + # headers = {"foo": "bar1"} + # visitor_headers = {"foo": "bar2"} + # _merge_dictionaries(headers, visitor_headers, aggregate_duplicates=True) + # headers["foo"] == ["bar1", "bar2"] -> The stamp is now a list + # _merge_dictionaries(headers, visitor_headers, aggregate_duplicates=False) + # headers["foo"] == "bar2" -> "bar1" is lost, but the stamp is according to the visitor + aggregate_duplicates = False + + headers = headers.copy() + # Merge headers with visitor headers + if visitor_headers is not None: + visitor_headers = visitor_headers or {} if "stamped_headers" not in visitor_headers: visitor_headers["stamped_headers"] = list(visitor_headers.keys()) - _merge_dictionaries(headers, visitor_headers) + # Prioritize visitor_headers over headers + _merge_dictionaries(headers, visitor_headers, aggregate_duplicates=aggregate_duplicates) + headers["stamped_headers"] = list(set(headers["stamped_headers"])) + # Merge headers with self.options else: - headers["stamped_headers"] = [header for header in headers.keys() if header not in self.options] - _merge_dictionaries(headers, self.options) + headers["stamped_headers"] = [ + header for header in headers.keys() + if header not in self.options and header != "stamped_headers" + ] - # Preserve previous stamped headers - stamped_headers = set(self.options.get("stamped_headers", [])) - stamped_headers.update(headers["stamped_headers"]) + # Prioritize self.options over headers + _merge_dictionaries(headers, self.options, aggregate_duplicates=aggregate_duplicates) + + # Sync missing stamps from self.options (relevant for stamping during task replacement) + stamped_headers = set(headers.get("stamped_headers", [])) + stamped_headers.update(self.options.get("stamped_headers", [])) headers["stamped_headers"] = list(stamped_headers) - return self.set(**headers) + for previous_header in stamped_headers: + if previous_header not in headers and previous_header in self.options: + headers[previous_header] = self.options[previous_header] + return headers def stamp_links(self, visitor, **headers): """Stamp this signature links (callbacks and errbacks). @@ -608,42 +632,27 @@ def stamp_links(self, visitor, **headers): visitor (StampingVisitor): Visitor API object. headers (Dict): Stamps that should be added to headers. 
""" - if not visitor: - return - non_visitor_headers = headers.copy() # Stamp all of the callbacks of this signature - headers = non_visitor_headers.copy() + headers = deepcopy(non_visitor_headers) for link in self.options.get('link', []) or []: - visitor_headers = visitor.on_callback(link, **headers) - if visitor_headers and "stamped_headers" not in visitor_headers: - visitor_headers["stamped_headers"] = list(visitor_headers.keys()) - headers.update(visitor_headers or {}) link = maybe_signature(link, app=self.app) + visitor_headers = None + if visitor is not None: + visitor_headers = visitor.on_callback(link, **headers) or {} + headers = self._stamp_headers(visitor_headers, **headers) link.stamp(visitor=visitor, **headers) - # Stamping a link to a signature with previous stamps - # may result in missing stamps in the link options, if the linking - # was done AFTER the stamping of the signature - for stamp in link.options['stamped_headers']: - if stamp in self.options and stamp not in link.options: - link.options[stamp] = self.options[stamp] # Stamp all of the errbacks of this signature - headers = non_visitor_headers.copy() + headers = deepcopy(non_visitor_headers) for link in self.options.get('link_error', []) or []: - visitor_headers = visitor.on_errback(link, **headers) - if visitor_headers and "stamped_headers" not in visitor_headers: - visitor_headers["stamped_headers"] = list(visitor_headers.keys()) - headers.update(visitor_headers or {}) link = maybe_signature(link, app=self.app) + visitor_headers = None + if visitor is not None: + visitor_headers = visitor.on_errback(link, **headers) or {} + headers = self._stamp_headers(visitor_headers, **headers) link.stamp(visitor=visitor, **headers) - # Stamping a link to a signature with previous stamps - # may result in missing stamps in the link options, if the linking - # was done AFTER the stamping of the signature - for stamp in link.options['stamped_headers']: - if stamp in self.options and stamp not in link.options: - link.options[stamp] = self.options[stamp] def _with_list_option(self, key): """Gets the value at the given self.options[key] as a list. 
@@ -1026,17 +1035,11 @@ def run(self, args=None, kwargs=None, group_id=None, chord=None, task_id, group_id, chord, group_index=group_index, ) - groups = self.options.get("groups") - stamped_headers = self.options.get("stamped_headers") - visitor = GroupStampingVisitor(groups=groups, stamped_headers=stamped_headers) - self.stamp(visitor=visitor) - # For a chain of single task, execute the task directly and return the result for that task # For a chain of multiple tasks, execute all of the tasks and return the AsyncResult for the chain if results_from_prepare: if link: tasks[0].extend_list_option('link', link) - tasks[0].stamp_links(visitor=visitor) first_task = tasks.pop() options = _prepare_chain_from_options(options, tasks, use_link) @@ -1065,10 +1068,12 @@ def freeze(self, _id=None, group_id=None, chord=None, return results[0] def stamp(self, visitor=None, **headers): + visitor_headers = None if visitor is not None: - headers.update(visitor.on_chain_start(self, **headers)) + visitor_headers = visitor.on_chain_start(self, **headers) or {} + headers = self._stamp_headers(visitor_headers, **headers) + self.stamp_links(visitor, **headers) - super().stamp(visitor=visitor, **headers) for task in self.tasks: task.stamp(visitor=visitor, **headers) @@ -1234,9 +1239,6 @@ def prepare_steps(self, args, kwargs, tasks, def apply(self, args=None, kwargs=None, **options): args = args if args else () kwargs = kwargs if kwargs else {} - groups = self.options.get("groups") - stamped_headers = self.options.get("stamped_headers") - self.stamp(visitor=GroupStampingVisitor(groups=groups, stamped_headers=stamped_headers)) last, (fargs, fkwargs) = None, (args, kwargs) for task in self.tasks: res = task.clone(fargs, fkwargs).apply( @@ -1565,11 +1567,6 @@ def apply_async(self, args=None, kwargs=None, add_to_parent=True, options, group_id, root_id = self._freeze_gid(options) tasks = self._prepared(self.tasks, [], group_id, root_id, app) - - groups = self.options.get("groups") - stamped_headers = self.options.get("stamped_headers") - self.stamp(visitor=GroupStampingVisitor(groups=groups, stamped_headers=stamped_headers)) - p = barrier() results = list(self._apply_tasks(tasks, producer, app, p, args=args, kwargs=kwargs, **options)) @@ -1593,9 +1590,6 @@ def apply_async(self, args=None, kwargs=None, add_to_parent=True, def apply(self, args=None, kwargs=None, **options): args = args if args else () kwargs = kwargs if kwargs else {} - groups = self.options.get("groups") - stamped_headers = self.options.get("stamped_headers") - self.stamp(visitor=GroupStampingVisitor(groups=groups, stamped_headers=stamped_headers)) app = self.app if not self.tasks: return self.freeze() # empty group returns GroupResult @@ -1610,10 +1604,11 @@ def set_immutable(self, immutable): task.set_immutable(immutable) def stamp(self, visitor=None, **headers): + visitor_headers = None if visitor is not None: - headers.update(visitor.on_group_start(self, **headers)) - - super().stamp(visitor=visitor, **headers) + visitor_headers = visitor.on_group_start(self, **headers) or {} + headers = self._stamp_headers(visitor_headers, **headers) + self.stamp_links(visitor, **headers) if isinstance(self.tasks, _regen): self.tasks.map(_partial(_stamp_regen_task, visitor=visitor, **headers)) @@ -2070,21 +2065,25 @@ def stamp(self, visitor=None, **headers): if isinstance(tasks, group): tasks = tasks.tasks + visitor_headers = None if visitor is not None: - headers.update(visitor.on_chord_header_start(self, **headers)) - super().stamp(visitor=visitor, **headers) + 
visitor_headers = visitor.on_chord_header_start(self, **headers) or {} + headers = self._stamp_headers(visitor_headers, **headers) + self.stamp_links(visitor, **headers) if isinstance(tasks, _regen): tasks.map(_partial(_stamp_regen_task, visitor=visitor, **headers)) else: + stamps = headers.copy() for task in tasks: - task.stamp(visitor=visitor, **headers) + task.stamp(visitor=visitor, **stamps) if visitor is not None: visitor.on_chord_header_end(self, **headers) if visitor is not None and self.body is not None: - headers.update(visitor.on_chord_body(self, **headers)) + visitor_headers = visitor.on_chord_body(self, **headers) or {} + headers = self._stamp_headers(visitor_headers, **headers) self.body.stamp(visitor=visitor, **headers) def apply_async(self, args=None, kwargs=None, task_id=None, @@ -2105,13 +2104,7 @@ def apply_async(self, args=None, kwargs=None, task_id=None, return self.apply(args, kwargs, body=body, task_id=task_id, **options) - groups = self.options.get("groups") - stamped_headers = self.options.get("stamped_headers") - self.stamp(visitor=GroupStampingVisitor(groups=groups, stamped_headers=stamped_headers)) - tasks.stamp(visitor=GroupStampingVisitor(groups=groups, stamped_headers=stamped_headers)) - merged_options = dict(self.options, **options) if options else self.options - option_task_id = merged_options.pop("task_id", None) if task_id is None: task_id = option_task_id @@ -2123,13 +2116,9 @@ def apply(self, args=None, kwargs=None, propagate=True, body=None, **options): args = args if args else () kwargs = kwargs if kwargs else {} - stamped_headers = self.options.get("stamped_headers") - groups = self.options.get("groups") body = self.body if body is None else body tasks = (self.tasks.clone() if isinstance(self.tasks, group) else group(self.tasks, app=self.app)) - self.stamp(visitor=GroupStampingVisitor(groups=groups, stamped_headers=stamped_headers)) - tasks.stamp(visitor=GroupStampingVisitor(groups=groups, stamped_headers=stamped_headers)) return body.apply( args=(tasks.apply(args, kwargs).get(propagate=propagate),), ) @@ -2201,7 +2190,7 @@ def run(self, header, body, partial_args, app=None, interval=None, if options: options.pop('task_id', None) stamped_headers = set(body.options.get("stamped_headers", [])) - stamped_headers.update(options["stamped_headers"]) + stamped_headers.update(options.get("stamped_headers", [])) options["stamped_headers"] = list(stamped_headers) body.options.update(options) diff --git a/t/integration/test_canvas.py b/t/integration/test_canvas.py index 4d91accb3d0..337cbbe6c7f 100644 --- a/t/integration/test_canvas.py +++ b/t/integration/test_canvas.py @@ -506,21 +506,6 @@ def test_chain_of_a_chord_and_three_tasks_and_a_group(self, manager): res = c() assert res.get(timeout=TIMEOUT) == [8, 8] - def test_stamping_example_canvas(self, manager): - """Test the stamping example canvas from the examples directory""" - try: - manager.app.backend.ensure_chords_allowed() - except NotImplementedError as e: - raise pytest.skip(e.args[0]) - - c = chain( - group(identity.s(i) for i in range(1, 4)) | xsum.s(), - chord(group(mul.s(10) for _ in range(1, 4)), xsum.s()), - ) - - res = c() - assert res.get(timeout=TIMEOUT) == 180 - @pytest.mark.xfail(raises=TimeoutError, reason="Task is timeout") def test_nested_chain_group_lone(self, manager): """ @@ -1068,46 +1053,6 @@ def test_result_set_error(self, manager): class test_group: - def test_group_stamping(self, manager, subtests): - if not manager.app.conf.result_backend.startswith('redis'): - raise 
pytest.skip('Requires redis result backend.') - - sig1 = add.s(1, 1000) - sig1_res = sig1.freeze() - g1 = group(sig1, add.s(1, 2000)) - g1_res = g1.freeze() - res = g1.apply_async() - res.get(timeout=TIMEOUT) - - with subtests.test("sig_1 is stamped", groups=[g1_res.id]): - assert sig1_res._get_task_meta()["groups"] == [g1_res.id] - - def test_nested_group_stamping(self, manager, subtests): - if not manager.app.conf.result_backend.startswith('redis'): - raise pytest.skip('Requires redis result backend.') - - sig1 = add.s(2, 2) - sig2 = add.s(2) - - sig1_res = sig1.freeze() - sig2_res = sig2.freeze() - - g2 = group(sig2, chain(add.s(4), add.s(2))) - - g2_res = g2.freeze() - - g1 = group(sig1, chain(add.s(1, 1), g2)) - - g1_res = g1.freeze() - res = g1.apply_async() - res.get(timeout=TIMEOUT) - - with subtests.test("sig1 is stamped", groups=[g1_res.id]): - assert sig1_res._get_task_meta()['groups'] == [g1_res.id] - with subtests.test("sig2 is stamped", groups=[g1_res.id, g2_res.id]): - assert sig2_res._get_task_meta()['groups'] == \ - [g1_res.id, g2_res.id] - @flaky def test_ready_with_exception(self, manager): if not manager.app.conf.result_backend.startswith('redis'): @@ -1550,43 +1495,6 @@ def assert_ping(manager): class test_chord: - def test_chord_stamping_two_levels(self, manager, subtests): - """ - For a group within a chord, test that group stamps are stored in - the correct order. - """ - try: - manager.app.backend.ensure_chords_allowed() - except NotImplementedError as e: - raise pytest.skip(e.args[0]) - - sig_1 = add.s(2, 2) - sig_2 = add.s(2) - - sig_1_res = sig_1.freeze() - sig_2_res = sig_2.freeze() - - g2 = group( - sig_2, - add.s(4), - ) - - g2_res = g2.freeze() - - sig_sum = xsum.s() - sig_sum.freeze() - - g1 = chord([sig_1, chain(add.s(4, 4), g2)], sig_sum) - g1.freeze() - - res = g1.apply_async() - res.get(timeout=TIMEOUT) - - with subtests.test("sig_1_res is stamped", groups=[g1.tasks.id]): - assert sig_1_res._get_task_meta()['groups'] == [g1.tasks.id] - with subtests.test("sig_2_res is stamped", groups=[g1.id]): - assert sig_2_res._get_task_meta()['groups'] == [g1.tasks.id, g2_res.id] - @flaky def test_simple_chord_with_a_delay_in_group_save(self, manager, monkeypatch): try: @@ -3143,20 +3051,116 @@ def test_rebuild_nested_chord_chord(self, manager): sig.delay().get(timeout=TIMEOUT) -class test_stamping_visitor: +class test_stamping_mechanism: + def test_stamping_workflow(self, manager, subtests): + try: + manager.app.backend.ensure_chords_allowed() + except NotImplementedError as e: + raise pytest.skip(e.args[0]) + + workflow = group( + add.s(1, 2) | add.s(3), + add.s(4, 5) | add.s(6), + identity.si(21), + ) | group( + xsum.s(), + xsum.s(), + ) + + @task_received.connect + def task_received_handler(request=None, **kwargs): + nonlocal assertion_result + link = None + if request._Request__payload[2]["callbacks"]: + link = signature(request._Request__payload[2]["callbacks"][0]) + link_error = None + if request._Request__payload[2]["errbacks"]: + link_error = signature(request._Request__payload[2]["errbacks"][0]) + + assertion_result = all( + [ + assertion_result, + [stamped_header in request.stamps for stamped_header in request.stamped_headers], + [ + stamped_header in link.options + for stamped_header in link.options["stamped_headers"] + if link # the link itself doensn't have a link + ], + [ + stamped_header in link_error.options + for stamped_header in link_error.options["stamped_headers"] + if link_error # the link_error itself doensn't have a link + ], + ] + ) + + 
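# Illustrative sketch (not part of the patch): the minimal visitor flow this
# test drives end to end. It assumes a configured app exposing the same bound
# ``add`` task used above; ``StampingVisitor`` comes from ``celery.canvas``
# exactly as imported by this test module.
from celery.canvas import StampingVisitor

class MonitoringIdVisitor(StampingVisitor):
    def on_signature(self, sig, **headers) -> dict:
        # Every key returned here is merged into the signature's options
        # and recorded under options["stamped_headers"].
        return {"monitoring_id": "1234"}

sig = add.si(1, 1)
sig.stamp(visitor=MonitoringIdVisitor())
assert sig.options["monitoring_id"] == "1234"
assert "monitoring_id" in sig.options["stamped_headers"]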
@before_task_publish.connect + def before_task_publish_handler( + body=None, + headers=None, + **kwargs, + ): + nonlocal assertion_result + + assertion_result = all( + [stamped_header in headers["stamps"] for stamped_header in headers["stamped_headers"]] + ) + + class CustomStampingVisitor(StampingVisitor): + def on_signature(self, sig, **headers) -> dict: + return {"on_signature": 42} + + with subtests.test("Prepare canvas workflow and stamp it"): + link_sig = identity.si("link") + link_error_sig = identity.si("link_error") + canvas_workflow = workflow + canvas_workflow.link(link_sig) + canvas_workflow.link_error(link_error_sig) + canvas_workflow.stamp(visitor=CustomStampingVisitor()) + + with subtests.test("Check canvas was executed successfully"): + assertion_result = False + assert canvas_workflow.apply_async().get() == [42] * 2 + assert assertion_result + + def test_stamping_example_canvas(self, manager): + """Test the stamping example canvas from the examples directory""" + try: + manager.app.backend.ensure_chords_allowed() + except NotImplementedError as e: + raise pytest.skip(e.args[0]) + + c = chain( + group(identity.s(i) for i in range(1, 4)) | xsum.s(), + chord(group(mul.s(10) for _ in range(1, 4)), xsum.s()), + ) + + res = c() + assert res.get(timeout=TIMEOUT) == 180 + def test_stamp_value_type_defined_by_visitor(self, manager, subtests): - """ Test that the visitor can define the type of the stamped value """ + """Test that the visitor can define the type of the stamped value""" @before_task_publish.connect - def before_task_publish_handler(sender=None, body=None, exchange=None, routing_key=None, headers=None, - properties=None, declare=None, retry_policy=None, **kwargs): + def before_task_publish_handler( + sender=None, + body=None, + exchange=None, + routing_key=None, + headers=None, + properties=None, + declare=None, + retry_policy=None, + **kwargs, + ): nonlocal task_headers task_headers = headers.copy() - with subtests.test(msg='Test stamping a single value'): + with subtests.test(msg="Test stamping a single value"): + class CustomStampingVisitor(StampingVisitor): def on_signature(self, sig, **headers) -> dict: - return {'stamp': 42} + return {"stamp": 42} stamped_task = add.si(1, 1) stamped_task.stamp(visitor=CustomStampingVisitor()) @@ -3165,14 +3169,15 @@ def on_signature(self, sig, **headers) -> dict: stamped_task.apply_async() assert task_headers is not None assert result.get() == 2 - assert 'stamps' in task_headers - assert 'stamp' in task_headers['stamps'] - assert not isinstance(task_headers['stamps']['stamp'], list) + assert "stamps" in task_headers + assert "stamp" in task_headers["stamps"] + assert not isinstance(task_headers["stamps"]["stamp"], list) + + with subtests.test(msg="Test stamping a list of values"): - with subtests.test(msg='Test stamping a list of values'): class CustomStampingVisitor(StampingVisitor): def on_signature(self, sig, **headers) -> dict: - return {'stamp': [4, 2]} + return {"stamp": [4, 2]} stamped_task = add.si(1, 1) stamped_task.stamp(visitor=CustomStampingVisitor()) @@ -3181,16 +3186,25 @@ def on_signature(self, sig, **headers) -> dict: stamped_task.apply_async() assert task_headers is not None assert result.get() == 2 - assert 'stamps' in task_headers - assert 'stamp' in task_headers['stamps'] - assert isinstance(task_headers['stamps']['stamp'], list) + assert "stamps" in task_headers + assert "stamp" in task_headers["stamps"] + assert isinstance(task_headers["stamps"]["stamp"], list) def 
test_properties_not_affected_from_stamping(self, manager, subtests): - """ Test that the task properties are not dirty with stamping visitor entries """ + """Test that the task properties are not dirty with stamping visitor entries""" @before_task_publish.connect - def before_task_publish_handler(sender=None, body=None, exchange=None, routing_key=None, headers=None, - properties=None, declare=None, retry_policy=None, **kwargs): + def before_task_publish_handler( + sender=None, + body=None, + exchange=None, + routing_key=None, + headers=None, + properties=None, + declare=None, + retry_policy=None, + **kwargs, + ): nonlocal task_headers nonlocal task_properties task_headers = headers.copy() @@ -3198,7 +3212,7 @@ def before_task_publish_handler(sender=None, body=None, exchange=None, routing_k class CustomStampingVisitor(StampingVisitor): def on_signature(self, sig, **headers) -> dict: - return {'stamp': 42} + return {"stamp": 42} stamped_task = add.si(1, 1) stamped_task.stamp(visitor=CustomStampingVisitor()) @@ -3208,35 +3222,27 @@ def on_signature(self, sig, **headers) -> dict: stamped_task.apply_async() assert task_properties is not None assert result.get() == 2 - assert 'stamped_headers' in task_headers - stamped_headers = task_headers['stamped_headers'] + assert "stamped_headers" in task_headers + stamped_headers = task_headers["stamped_headers"] - with subtests.test(msg='Test that the task properties are not dirty with stamping visitor entries'): - assert 'stamped_headers' not in task_properties, 'stamped_headers key should not be in task properties' + with subtests.test(msg="Test that the task properties are not dirty with stamping visitor entries"): + assert "stamped_headers" not in task_properties, "stamped_headers key should not be in task properties" for stamp in stamped_headers: assert stamp not in task_properties, f'The stamp "{stamp}" should not be in the task properties' def test_task_received_has_access_to_stamps(self, manager): - """ Make sure that the request has the stamps using the task_received signal """ + """Make sure that the request has the stamps using the task_received signal""" assertion_result = False @task_received.connect - def task_received_handler( - sender=None, - request=None, - signal=None, - **kwargs - ): + def task_received_handler(sender=None, request=None, signal=None, **kwargs): nonlocal assertion_result - assertion_result = all([ - stamped_header in request.stamps - for stamped_header in request.stamped_headers - ]) + assertion_result = all([stamped_header in request.stamps for stamped_header in request.stamped_headers]) class CustomStampingVisitor(StampingVisitor): def on_signature(self, sig, **headers) -> dict: - return {'stamp': 42} + return {"stamp": 42} stamped_task = add.si(1, 1) stamped_task.stamp(visitor=CustomStampingVisitor()) @@ -3244,7 +3250,7 @@ def on_signature(self, sig, **headers) -> dict: assert assertion_result def test_all_tasks_of_canvas_are_stamped(self, manager, subtests): - """ Test that complex canvas are stamped correctly """ + """Test that complex canvas are stamped correctly""" try: manager.app.backend.ensure_chords_allowed() except NotImplementedError as e: @@ -3252,14 +3258,16 @@ def test_all_tasks_of_canvas_are_stamped(self, manager, subtests): @task_received.connect def task_received_handler(**kwargs): - request = kwargs['request'] + request = kwargs["request"] nonlocal assertion_result - assertion_result = all([ - assertion_result, - all([stamped_header in request.stamps for stamped_header in request.stamped_headers]), - 
request.stamps['stamp'] == 42 - ]) + assertion_result = all( + [ + assertion_result, + all([stamped_header in request.stamps for stamped_header in request.stamped_headers]), + request.stamps["stamp"] == 42, + ] + ) # Using a list because pytest.mark.parametrize does not play well canvas = [ @@ -3272,15 +3280,25 @@ def task_received_handler(**kwargs): group(add.s(1, 1) | add.s(10), add.s(2, 2) | add.s(20)), chain(add.s(1, 1) | add.s(10), add.s(2) | add.s(20)), chord([add.s(1, 1) | add.s(10), add.s(2, 2) | add.s(20)], xsum.s()), - chain(chain(add.s(1, 1) | add.s(10), add.s(2) | add.s(20)), add.s(3) | add.s(30)), - chord(group(chain(add.s(1, 1), add.s(2)), chord([add.s(3, 3), add.s(4, 4)], xsum.s())), xsum.s()), + chain( + chain(add.s(1, 1) | add.s(10), add.s(2) | add.s(20)), + add.s(3) | add.s(30), + ), + chord( + group( + chain(add.s(1, 1), add.s(2)), + chord([add.s(3, 3), add.s(4, 4)], xsum.s()), + ), + xsum.s(), + ), ] for sig in canvas: - with subtests.test(msg='Assert all tasks are stamped'): + with subtests.test(msg="Assert all tasks are stamped"): + class CustomStampingVisitor(StampingVisitor): def on_signature(self, sig, **headers) -> dict: - return {'stamp': 42} + return {"stamp": 42} stamped_task = sig stamped_task.stamp(visitor=CustomStampingVisitor()) @@ -3289,26 +3307,29 @@ def on_signature(self, sig, **headers) -> dict: assert assertion_result def test_replace_merge_stamps(self, manager): - """ Test that replacing a task keeps the previous and new stamps """ + """Test that replacing a task keeps the previous and new stamps""" @task_received.connect def task_received_handler(**kwargs): - request = kwargs['request'] + request = kwargs["request"] nonlocal assertion_result expected_stamp_key = list(StampOnReplace.stamp.keys())[0] expected_stamp_value = list(StampOnReplace.stamp.values())[0] - assertion_result = all([ - assertion_result, - all([stamped_header in request.stamps for stamped_header in request.stamped_headers]), - request.stamps['stamp'] == 42, - request.stamps[expected_stamp_key] == expected_stamp_value - if 'replaced_with_me' in request.task_name else True - ]) + assertion_result = all( + [ + assertion_result, + all([stamped_header in request.stamps for stamped_header in request.stamped_headers]), + request.stamps["stamp"] == 42, + request.stamps[expected_stamp_key] == expected_stamp_value + if "replaced_with_me" in request.task_name + else True, + ] + ) class CustomStampingVisitor(StampingVisitor): def on_signature(self, sig, **headers) -> dict: - return {'stamp': 42} + return {"stamp": 42} stamped_task = replace_with_stamped_task.s() stamped_task.stamp(visitor=CustomStampingVisitor()) @@ -3318,57 +3339,22 @@ def on_signature(self, sig, **headers) -> dict: sleep(1) # stamped_task needs to be stamped with CustomStampingVisitor # and the replaced task with both CustomStampingVisitor and StampOnReplace - assert assertion_result, 'All of the tasks should have been stamped' - - def test_replace_group_merge_stamps(self, manager): - """ Test that replacing a group signature keeps the previous and new group stamps """ - - x = 5 - y = 6 - - @task_received.connect - def task_received_handler(**kwargs): - request = kwargs['request'] - nonlocal assertion_result - nonlocal gid1 - - assertion_result = all([ - assertion_result, - request.stamps['groups'][0] == gid1, - len(request.stamps['groups']) == 2 - if any([request.args == [10, x], request.args == [10, y]]) else True - ]) - - sig = add.s(3, 3) | add.s(4) | group(add.s(x), add.s(y)) - sig = group(add.s(1, 1), add.s(2, 2), 
replace_with_stamped_task.s(replace_with=sig)) - assertion_result = False - sig.delay() - assertion_result = True - gid1 = sig.options['task_id'] - sleep(1) - assert assertion_result, 'Group stamping is corrupted' + assert assertion_result, "All of the tasks should have been stamped" def test_linking_stamped_sig(self, manager): - """ Test that linking a callback after stamping will stamp the callback correctly""" + """Test that linking a callback after stamping will stamp the callback correctly""" assertion_result = False @task_received.connect - def task_received_handler( - sender=None, - request=None, - signal=None, - **kwargs - ): + def task_received_handler(sender=None, request=None, signal=None, **kwargs): nonlocal assertion_result - link = request._Request__payload[2]['callbacks'][0] - assertion_result = all([ - stamped_header in link['options'] - for stamped_header in link['options']['stamped_headers'] - ]) + link = request._Request__payload[2]["callbacks"][0] + assertion_result = all( + [stamped_header in link["options"] for stamped_header in link["options"]["stamped_headers"]] + ) class FixedMonitoringIdStampingVisitor(StampingVisitor): - def __init__(self, msg_id): self.msg_id = msg_id @@ -3376,36 +3362,31 @@ def on_signature(self, sig, **headers): mtask_id = self.msg_id return {"mtask_id": mtask_id} - link_sig = identity.si('link_sig') - stamped_pass_sig = identity.si('passing sig') + link_sig = identity.si("link_sig") + stamped_pass_sig = identity.si("passing sig") stamped_pass_sig.stamp(visitor=FixedMonitoringIdStampingVisitor(str(uuid.uuid4()))) stamped_pass_sig.link(link_sig) - # This causes the relevant stamping for this test case - # as it will stamp the link via the group stamping internally + stamped_pass_sig.stamp(visitor=FixedMonitoringIdStampingVisitor("1234")) stamped_pass_sig.apply_async().get(timeout=2) assert assertion_result def test_err_linking_stamped_sig(self, manager): - """ Test that linking an error after stamping will stamp the errlink correctly""" + """Test that linking an error after stamping will stamp the errlink correctly""" assertion_result = False @task_received.connect - def task_received_handler( - sender=None, - request=None, - signal=None, - **kwargs - ): + def task_received_handler(sender=None, request=None, signal=None, **kwargs): nonlocal assertion_result link_error = request.errbacks[0] - assertion_result = all([ - stamped_header in link_error['options'] - for stamped_header in link_error['options']['stamped_headers'] - ]) + assertion_result = all( + [ + stamped_header in link_error["options"] + for stamped_header in link_error["options"]["stamped_headers"] + ] + ) class FixedMonitoringIdStampingVisitor(StampingVisitor): - def __init__(self, msg_id): self.msg_id = msg_id @@ -3413,12 +3394,11 @@ def on_signature(self, sig, **headers): mtask_id = self.msg_id return {"mtask_id": mtask_id} - link_error_sig = identity.si('link_error') + link_error_sig = identity.si("link_error") stamped_fail_sig = fail.si() stamped_fail_sig.stamp(visitor=FixedMonitoringIdStampingVisitor(str(uuid.uuid4()))) stamped_fail_sig.link_error(link_error_sig) with pytest.raises(ExpectedException): - # This causes the relevant stamping for this test case - # as it will stamp the link via the group stamping internally + stamped_fail_sig.stamp(visitor=FixedMonitoringIdStampingVisitor("1234")) stamped_fail_sig.apply_async().get() assert assertion_result diff --git a/t/unit/tasks/test_canvas.py b/t/unit/tasks/test_canvas.py index a22a4ed1ced..8f3fbd25ec0 100644 --- 
a/t/unit/tasks/test_canvas.py +++ b/t/unit/tasks/test_canvas.py @@ -6,12 +6,9 @@ import pytest import pytest_subtests # noqa -from celery import Task from celery._state import _task_stack -from celery.canvas import (GroupStampingVisitor, Signature, StampingVisitor, _chain, _maybe_group, - _merge_dictionaries, chain, chord, chunks, group, maybe_signature, maybe_unroll_group, - signature, xmap, xstarmap) -from celery.exceptions import Ignore +from celery.canvas import (Signature, _chain, _maybe_group, _merge_dictionaries, chain, chord, chunks, group, + maybe_signature, maybe_unroll_group, signature, xmap, xstarmap) from celery.result import AsyncResult, EagerResult, GroupResult SIG = Signature({ @@ -135,437 +132,6 @@ def __init__(self, *args, **kwargs): class test_Signature(CanvasCase): - @pytest.mark.usefixtures('depends_on_current_app') - def test_on_signature_gets_the_signature(self): - expected_sig = self.add.s(4, 2) - - class CustomStampingVisitor(StampingVisitor): - def on_signature(self, actual_sig, **headers) -> dict: - nonlocal expected_sig - assert actual_sig == expected_sig - return {'header': 'value'} - - sig = expected_sig.clone() - sig.stamp(CustomStampingVisitor()) - assert sig.options['header'] == 'value' - - def test_double_stamping(self, subtests): - """ - Test manual signature stamping with two different stamps. - """ - self.app.conf.task_always_eager = True - self.app.conf.task_store_eager_result = True - self.app.conf.result_extended = True - - sig_1 = self.add.s(2, 2) - sig_1.stamp(stamp1="stamp1") - sig_1.stamp(stamp2="stamp2") - sig_1_res = sig_1.freeze() - sig_1.apply() - - with subtests.test("sig_1_res is stamped with stamp1", stamp1=["stamp1"]): - assert sig_1_res._get_task_meta()["stamp1"] == ["stamp1"] - - with subtests.test("sig_1_res is stamped with stamp2", stamp2=["stamp2"]): - assert sig_1_res._get_task_meta()["stamp2"] == ["stamp2"] - - with subtests.test("sig_1_res is stamped twice", stamped_headers=["stamp2", "stamp1"]): - assert sorted(sig_1_res._get_task_meta()["stamped_headers"]) == sorted(["stamp2", "stamp1", "groups"]) - - def test_twice_stamping(self, subtests): - """ - Test manual signature stamping with two stamps twice. - """ - self.app.conf.task_always_eager = True - self.app.conf.task_store_eager_result = True - self.app.conf.result_extended = True - - sig_1 = self.add.s(2, 2) - sig_1.stamp(stamp="stamp1") - sig_1.stamp(stamp="stamp2") - sig_1_res = sig_1.freeze() - sig_1.apply() - - with subtests.test("sig_1_res is stamped twice", stamps=["stamp2", "stamp1"]): - assert sorted(sig_1_res._get_task_meta()["stamp"]) == sorted(["stamp2", "stamp1"]) - - with subtests.test("sig_1_res is stamped twice", stamped_headers=["stamp2", "stamp1"]): - assert sorted(sig_1_res._get_task_meta()["stamped_headers"]) == sorted(["stamp", "groups"]) - - def test_manual_stamping(self): - """ - Test manual signature stamping. - """ - self.app.conf.task_always_eager = True - self.app.conf.task_store_eager_result = True - self.app.conf.result_extended = True - - sig_1 = self.add.s(2, 2) - stamps = ["stamp1", "stamp2"] - sig_1.stamp(visitor=None, groups=[stamps[1]]) - sig_1.stamp(visitor=None, groups=stamps[0]) - sig_1_res = sig_1.freeze() - sig_1.apply() - assert sorted(sig_1_res._get_task_meta()['groups']) == sorted(stamps) - - def test_custom_stamping_visitor(self, subtests): - """ - Test manual signature stamping with a custom visitor class. 
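# Illustrative sketch (not part of the patch): the removed test below
# contrasts the two ways a visitor can declare its stamped headers,
# assuming ``StampingVisitor`` from ``celery.canvas``.
class ImplicitHeadersVisitor(StampingVisitor):
    def on_signature(self, sig, **headers) -> dict:
        # Without an explicit 'stamped_headers' key, every returned key
        # is treated as a stamped header implicitly.
        return {"header": "value"}

class ExplicitHeadersVisitor(StampingVisitor):
    def on_signature(self, sig, **headers) -> dict:
        # 'stamped_headers' names exactly which keys count as stamps.
        return {"header": "value", "stamped_headers": ["header"]}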
- """ - self.app.conf.task_always_eager = True - self.app.conf.task_store_eager_result = True - self.app.conf.result_extended = True - - class CustomStampingVisitor1(StampingVisitor): - def on_signature(self, sig, **headers) -> dict: - # without using stamped_headers key explicitly - # the key will be calculated from the headers implicitly - return {'header': 'value'} - - class CustomStampingVisitor2(StampingVisitor): - def on_signature(self, sig, **headers) -> dict: - return {'header': 'value', 'stamped_headers': ['header']} - - sig_1 = self.add.s(2, 2) - sig_1.stamp(visitor=CustomStampingVisitor1()) - sig_1_res = sig_1.freeze() - sig_1.apply() - sig_2 = self.add.s(2, 2) - sig_2.stamp(visitor=CustomStampingVisitor2()) - sig_2_res = sig_2.freeze() - sig_2.apply() - - with subtests.test("sig_1 is stamped with custom visitor", stamped_headers=["header", "groups"]): - assert sorted(sig_1_res._get_task_meta()["stamped_headers"]) == sorted(["header", "groups"]) - - with subtests.test("sig_2 is stamped with custom visitor", stamped_headers=["header", "groups"]): - assert sorted(sig_2_res._get_task_meta()["stamped_headers"]) == sorted(["header", "groups"]) - - with subtests.test("sig_1 is stamped with custom visitor", header=["value"]): - assert sig_1_res._get_task_meta()["header"] == ["value"] - - with subtests.test("sig_2 is stamped with custom visitor", header=["value"]): - assert sig_2_res._get_task_meta()["header"] == ["value"] - - @pytest.mark.usefixtures('depends_on_current_app') - def test_callback_stamping(self, subtests): - self.app.conf.task_always_eager = True - self.app.conf.task_store_eager_result = True - self.app.conf.result_extended = True - - class CustomStampingVisitor(StampingVisitor): - def on_signature(self, sig, **headers) -> dict: - return {'header': 'value'} - - def on_callback(self, callback, **header) -> dict: - return {'on_callback': True} - - def on_errback(self, errback, **header) -> dict: - return {'on_errback': True} - - sig_1 = self.add.s(0, 1) - sig_1_res = sig_1.freeze() - group_sig = group([self.add.s(3), self.add.s(4)]) - group_sig_res = group_sig.freeze() - chord_sig = chord([self.xsum.s(), self.xsum.s()], self.xsum.s()) - chord_sig_res = chord_sig.freeze() - sig_2 = self.add.s(2) - sig_2_res = sig_2.freeze() - chain_sig = chain( - sig_1, # --> 1 - group_sig, # --> [1+3, 1+4] --> [4, 5] - chord_sig, # --> [4+5, 4+5] --> [9, 9] --> 9+9 --> 18 - sig_2 # --> 18 + 2 --> 20 - ) - callback = signature('callback_task') - errback = signature('errback_task') - chain_sig.link(callback) - chain_sig.link_error(errback) - chain_sig.stamp(visitor=CustomStampingVisitor()) - chain_sig_res = chain_sig.apply_async() - chain_sig_res.get() - - with subtests.test("Confirm the chain was executed correctly", result=20): - # Before we run our assersions, let's confirm the base functionality of the chain is working - # as expected including the links stamping. 
- assert chain_sig_res.result == 20 - - with subtests.test("sig_1 is stamped with custom visitor", stamped_headers=["header", "groups"]): - assert sorted(sig_1_res._get_task_meta()["stamped_headers"]) == sorted(["header", "groups"]) - - with subtests.test("group_sig is stamped with custom visitor", stamped_headers=["header", "groups"]): - for result in group_sig_res.results: - assert sorted(result._get_task_meta()["stamped_headers"]) == sorted(["header", "groups"]) - - with subtests.test("chord_sig is stamped with custom visitor", stamped_headers=["header", "groups"]): - assert sorted(chord_sig_res._get_task_meta()["stamped_headers"]) == sorted(["header", "groups"]) - - with subtests.test("sig_2 is stamped with custom visitor", stamped_headers=["header", "groups"]): - assert sorted(sig_2_res._get_task_meta()["stamped_headers"]) == sorted(["header", "groups"]) - - with subtests.test("callback is stamped with custom visitor", - stamped_headers=["header", "groups, on_callback"]): - callback_link = chain_sig.options['link'][0] - headers = callback_link.options - stamped_headers = headers['stamped_headers'] - assert sorted(stamped_headers) == sorted(["header", "groups", "on_callback"]) - assert headers['on_callback'] is True - assert headers['header'] == 'value' - - with subtests.test("errback is stamped with custom visitor", - stamped_headers=["header", "groups, on_errback"]): - errback_link = chain_sig.options['link_error'][0] - headers = errback_link.options - stamped_headers = headers['stamped_headers'] - assert sorted(stamped_headers) == sorted(["header", "groups", "on_errback"]) - assert headers['on_errback'] is True - assert headers['header'] == 'value' - - @pytest.mark.usefixtures('depends_on_current_app') - def test_callback_stamping_link_after_stamp(self, subtests): - self.app.conf.task_always_eager = True - self.app.conf.task_store_eager_result = True - self.app.conf.result_extended = True - - class CustomStampingVisitor(StampingVisitor): - def on_signature(self, sig, **headers) -> dict: - return {'header': 'value'} - - def on_callback(self, callback, **header) -> dict: - return {'on_callback': True} - - def on_errback(self, errback, **header) -> dict: - return {'on_errback': True} - - sig_1 = self.add.s(0, 1) - sig_1_res = sig_1.freeze() - group_sig = group([self.add.s(3), self.add.s(4)]) - group_sig_res = group_sig.freeze() - chord_sig = chord([self.xsum.s(), self.xsum.s()], self.xsum.s()) - chord_sig_res = chord_sig.freeze() - sig_2 = self.add.s(2) - sig_2_res = sig_2.freeze() - chain_sig = chain( - sig_1, # --> 1 - group_sig, # --> [1+3, 1+4] --> [4, 5] - chord_sig, # --> [4+5, 4+5] --> [9, 9] --> 9+9 --> 18 - sig_2 # --> 18 + 2 --> 20 - ) - callback = signature('callback_task') - errback = signature('errback_task') - chain_sig.stamp(visitor=CustomStampingVisitor()) - chain_sig.link(callback) - chain_sig.link_error(errback) - chain_sig_res = chain_sig.apply_async() - chain_sig_res.get() - - with subtests.test("Confirm the chain was executed correctly", result=20): - # Before we run our assersions, let's confirm the base functionality of the chain is working - # as expected including the links stamping. 
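# Illustrative sketch (not part of the patch): this removed variant flips
# the order of operations. A callback linked *after* stamp() has already
# run is never visited, so it carries no visitor stamps (``add`` and the
# LinkAwareVisitor sketched earlier are assumptions for illustration):
sig = add.s(1, 1)
sig.stamp(visitor=LinkAwareVisitor())
sig.link(signature("callback_task"))  # attached after stamping
# => 'on_callback' never appears in the link's options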
- assert chain_sig_res.result == 20 - - with subtests.test("sig_1 is stamped with custom visitor", stamped_headers=["header", "groups"]): - assert sorted(sig_1_res._get_task_meta()["stamped_headers"]) == sorted(["header", "groups"]) - - with subtests.test("group_sig is stamped with custom visitor", stamped_headers=["header", "groups"]): - for result in group_sig_res.results: - assert sorted(result._get_task_meta()["stamped_headers"]) == sorted(["header", "groups"]) - - with subtests.test("chord_sig is stamped with custom visitor", stamped_headers=["header", "groups"]): - assert sorted(chord_sig_res._get_task_meta()["stamped_headers"]) == sorted(["header", "groups"]) - - with subtests.test("sig_2 is stamped with custom visitor", stamped_headers=["header", "groups"]): - assert sorted(sig_2_res._get_task_meta()["stamped_headers"]) == sorted(["header", "groups"]) - - with subtests.test("callback is stamped with custom visitor", - stamped_headers=["header", "groups, on_callback"]): - callback_link = chain_sig.options['link'][0] - headers = callback_link.options - stamped_headers = headers['stamped_headers'] - assert 'on_callback' not in stamped_headers, "Linking after stamping should not stamp the callback" - assert sorted(stamped_headers) == sorted(["header", "groups"]) - assert headers['header'] == 'value' - - with subtests.test("errback is stamped with custom visitor", - stamped_headers=["header", "groups, on_errback"]): - errback_link = chain_sig.options['link_error'][0] - headers = errback_link.options - stamped_headers = headers['stamped_headers'] - assert 'on_callback' not in stamped_headers, "Linking after stamping should not stamp the errback" - assert sorted(stamped_headers) == sorted(["header", "groups"]) - assert headers['header'] == 'value' - - @pytest.mark.usefixtures('depends_on_current_app') - def test_callback_stamping_on_replace(self, subtests): - class CustomStampingVisitor(StampingVisitor): - def on_signature(self, sig, **headers) -> dict: - return {'header': 'value'} - - def on_callback(self, callback, **header) -> dict: - return {'on_callback': True} - - def on_errback(self, errback, **header) -> dict: - return {'on_errback': True} - - class MyTask(Task): - def on_replace(self, sig): - sig.stamp(CustomStampingVisitor()) - return super().on_replace(sig) - - mytask = self.app.task(shared=False, base=MyTask)(return_True) - - sig1 = signature('sig1') - callback = signature('callback_task') - errback = signature('errback_task') - sig1.link(callback) - sig1.link_error(errback) - - with subtests.test("callback is not stamped with custom visitor yet"): - callback_link = sig1.options['link'][0] - headers = callback_link.options - assert 'on_callback' not in headers - assert 'header' not in headers - - with subtests.test("errback is not stamped with custom visitor yet"): - errback_link = sig1.options['link_error'][0] - headers = errback_link.options - assert 'on_errback' not in headers - assert 'header' not in headers - - with pytest.raises(Ignore): - mytask.replace(sig1) - - with subtests.test("callback is stamped with custom visitor", - stamped_headers=["header", "groups, on_callback"]): - callback_link = sig1.options['link'][0] - headers = callback_link.options - stamped_headers = headers['stamped_headers'] - assert sorted(stamped_headers) == sorted(["header", "groups", "on_callback"]) - assert headers['on_callback'] is True - assert headers['header'] == 'value' - - with subtests.test("errback is stamped with custom visitor", - stamped_headers=["header", "groups, on_errback"]): - 
errback_link = sig1.options['link_error'][0] - headers = errback_link.options - stamped_headers = headers['stamped_headers'] - assert sorted(stamped_headers) == sorted(["header", "groups", "on_errback"]) - assert headers['on_errback'] is True - assert headers['header'] == 'value' - - @pytest.mark.parametrize('sig_to_replace', [ - group(signature(f'sig{i}') for i in range(2)), - group([signature('sig1'), signature('sig2')]), - group((signature('sig1'), signature('sig2'))), - group(signature('sig1'), signature('sig2')), - chain(signature('sig1'), signature('sig2')), - ]) - @pytest.mark.usefixtures('depends_on_current_app') - def test_replacing_stamped_canvas_with_tasks(self, subtests, sig_to_replace): - class CustomStampingVisitor(StampingVisitor): - def on_signature(self, sig, **headers) -> dict: - return {'header': 'value'} - - class MyTask(Task): - def on_replace(self, sig): - nonlocal assertion_result - nonlocal failed_task - tasks = sig.tasks.tasks if isinstance(sig.tasks, group) else sig.tasks - assertion_result = len(tasks) == 2 - for task in tasks: - assertion_result = all([ - assertion_result, - 'header' in task.options['stamped_headers'], - all([header in task.options for header in task.options['stamped_headers']]), - ]) - if not assertion_result: - failed_task = task - break - - return super().on_replace(sig) - - @self.app.task(shared=False, bind=True, base=MyTask) - def replace_from_MyTask(self): - # Allows easy assertion for the test without using Mock - return self.replace(sig_to_replace) - - sig = replace_from_MyTask.s() - sig.stamp(CustomStampingVisitor()) - assertion_result = False - failed_task = None - sig.apply() - assert assertion_result, f"Task {failed_task} was not stamped correctly" - - @pytest.mark.usefixtures('depends_on_current_app') - def test_replacing_stamped_canvas_with_tasks_with_links(self): - class CustomStampingVisitor(StampingVisitor): - def on_signature(self, sig, **headers) -> dict: - return {'header': 'value'} - - class MyTask(Task): - def on_replace(self, sig): - nonlocal assertion_result - nonlocal failed_task - nonlocal failed_task_link - tasks = sig.tasks.tasks if isinstance(sig.tasks, group) else sig.tasks - assertion_result = True - for task in tasks: - links = task.options['link'] - links.extend(task.options['link_error']) - for link in links: - assertion_result = all([ - assertion_result, - all([ - stamped_header in link['options'] - for stamped_header in link['options']['stamped_headers'] - ]), - ]) - else: - if not assertion_result: - failed_task_link = link - break - - assertion_result = all([ - assertion_result, - task.options['stamped_headers']['header'] == 'value', - all([ - header in task.options - for header in task.options['stamped_headers'] - ]), - ]) - - if not assertion_result: - failed_task = task - break - - return super().on_replace(sig) - - @self.app.task(shared=False, bind=True, base=MyTask) - def replace_from_MyTask(self): - # Allows easy assertion for the test without using Mock - return self.replace(sig_to_replace) - - s1 = chain(signature('foo11'), signature('foo12')) - s1.link(signature('link_foo1')) - s1.link_error(signature('link_error_foo1')) - - s2 = chain(signature('foo21'), signature('foo22')) - s2.link(signature('link_foo2')) - s2.link_error(signature('link_error_foo2')) - - sig_to_replace = group([s1, s2]) - sig = replace_from_MyTask.s() - sig.stamp(CustomStampingVisitor()) - assertion_result = False - failed_task = None - failed_task_link = None - sig.apply() - - err_msg = f"Task {failed_task} was not stamped 
correctly" if failed_task else \ - f"Task link {failed_task_link} was not stamped correctly" if failed_task_link else \ - "Assertion failed" - assert assertion_result, err_msg - def test_getitem_property_class(self): assert Signature.task assert Signature.args @@ -1019,15 +585,11 @@ def s(*args, **kwargs): assert c.tasks[-1].options['chord'] == 'some_chord_id' c.apply_async(link=[s(32)]) - expected_sig = s(32) - expected_sig.stamp(visitor=GroupStampingVisitor()) - assert c.tasks[-1].options['link'] == [expected_sig] + assert c.tasks[-1].options['link'] == [s(32)] c.apply_async(link_error=[s('error')]) - expected_sig = s('error') - expected_sig.stamp(visitor=GroupStampingVisitor()) for task in c.tasks: - assert task.options['link_error'] == [expected_sig] + assert task.options['link_error'] == [s('error')] def test_apply_options_none(self): class static(Signature): @@ -1222,327 +784,6 @@ def link_chain(sig): class test_group(CanvasCase): - def test_group_stamping_one_level(self, subtests): - """ - Test that when a group ID is frozen, that group ID is stored in - each task within the group. - """ - self.app.conf.task_always_eager = True - self.app.conf.task_store_eager_result = True - self.app.conf.result_extended = True - - sig_1 = self.add.s(2, 2) - sig_2 = self.add.s(4, 4) - sig_1_res = sig_1.freeze() - sig_2_res = sig_2.freeze() - - g = group(sig_1, sig_2, app=self.app) - g.stamp(stamp="stamp") - g_res = g.freeze() - g.apply() - - with subtests.test("sig_1_res is stamped", groups=[g_res.id]): - assert sig_1_res._get_task_meta()['groups'] == [g_res.id] - - with subtests.test("sig_1_res is stamped manually", stamp=["stamp"]): - assert sig_1_res._get_task_meta()['stamp'] == ["stamp"] - - with subtests.test("sig_2_res is stamped", groups=[g_res.id]): - assert sig_2_res._get_task_meta()['groups'] == [g_res.id] - - with subtests.test("sig_2_res is stamped manually", stamp=["stamp"]): - assert sig_2_res._get_task_meta()['stamp'] == ["stamp"] - - with subtests.test("sig_1_res has stamped_headers", stamped_headers=["stamp", 'groups']): - assert sorted(sig_1_res._get_task_meta()['stamped_headers']) == sorted(['stamp', 'groups']) - - with subtests.test("sig_2_res has stamped_headers", stamped_headers=["stamp"]): - assert sorted(sig_2_res._get_task_meta()['stamped_headers']) == sorted(['stamp', 'groups']) - - def test_group_stamping_two_levels(self, subtests): - """ - For a group within a group, test that group stamps are stored in - the correct order. 
- """ - self.app.conf.task_always_eager = True - self.app.conf.task_store_eager_result = True - self.app.conf.result_extended = True - - sig_1 = self.add.s(2, 2) - sig_2 = self.add.s(1, 1) - nested_sig_1 = self.add.s(2) - nested_sig_2 = self.add.s(4) - - sig_1_res = sig_1.freeze() - sig_2_res = sig_2.freeze() - first_nested_sig_res = nested_sig_1.freeze() - second_nested_sig_res = nested_sig_2.freeze() - - g2 = group( - nested_sig_1, - nested_sig_2, - app=self.app - ) - - g2_res = g2.freeze() - - g1 = group( - sig_1, - chain( - sig_2, - g2, - app=self.app - ), - app=self.app - ) - - g1_res = g1.freeze() - g1.apply() - - with subtests.test("sig_1_res is stamped", groups=[g1_res.id]): - assert sig_1_res._get_task_meta()['groups'] == [g1_res.id] - with subtests.test("sig_2_res is stamped", groups=[g1_res.id]): - assert sig_2_res._get_task_meta()['groups'] == [g1_res.id] - with subtests.test("first_nested_sig_res is stamped", groups=[g1_res.id, g2_res.id]): - assert sorted(first_nested_sig_res._get_task_meta()['groups']) == \ - sorted([g1_res.id, g2_res.id]) - with subtests.test("second_nested_sig_res is stamped", groups=[g1_res.id, g2_res.id]): - assert sorted(second_nested_sig_res._get_task_meta()['groups']) == \ - sorted([g1_res.id, g2_res.id]) - - def test_group_stamping_with_replace(self, subtests): - """ - For a group within a replaced element, test that group stamps are replaced correctly. - """ - self.app.conf.task_always_eager = True - self.app.conf.task_store_eager_result = True - self.app.conf.result_extended = True - - sig_1 = self.add.s(2, 2) - sig_2 = self.add.s(2, 2) | self.replaced.s(8) - sig_1_res = sig_1.freeze() - sig_2_res = sig_2.freeze() - - g = group(sig_1, sig_2, app=self.app) - g_res = g.freeze() - g.apply() - - with subtests.test("sig_1_res is stamped", groups=[g_res.id]): - assert sig_1_res._get_task_meta()['groups'] == [g_res.id] - with subtests.test("sig_2_res is stamped", groups=[g_res.id]): - assert sig_2_res._get_task_meta()['groups'] == [g_res.id] - - def test_group_stamping_with_replaced_group(self, subtests): - """ - For a group within a replaced element, test that group stamps are replaced correctly. - """ - self.app.conf.task_always_eager = True - self.app.conf.task_store_eager_result = True - self.app.conf.result_extended = True - nested_g = self.replace_with_group.s(8) - nested_g_res = nested_g.freeze() - sig_1 = self.add.s(2, 2) - sig_2 = self.add.s(2, 2) | nested_g - sig_1_res = sig_1.freeze() - sig_2_res = sig_2.freeze() - - g = group(sig_1, sig_2, app=self.app) - g_res = g.freeze() - g.apply() - - with subtests.test("sig_1_res is stamped", groups=[g_res.id]): - assert sig_1_res._get_task_meta()['groups'] == [g_res.id] - with subtests.test("sig_2_res is stamped", groups=nested_g_res._get_task_meta()['groups']): - assert sig_2_res._get_task_meta()['groups'] == nested_g_res._get_task_meta()['groups'] - - def test_group_stamping_with_replaced_chain(self, subtests): - """ - For a group within a replaced element, test that group stamps are replaced correctly. 
- """ - self.app.conf.task_always_eager = True - self.app.conf.task_store_eager_result = True - self.app.conf.result_extended = True - nested_g = self.replace_with_chain.s(8) - nested_g_res = nested_g.freeze() - sig_1 = self.add.s(2, 2) - sig_2 = self.add.s(2, 2) | nested_g - sig_1_res = sig_1.freeze() - sig_2_res = sig_2.freeze() - - g = group(sig_1, sig_2, app=self.app) - g_res = g.freeze() - g.apply() - - with subtests.test("sig_1_res is stamped", groups=[g_res.id]): - assert sig_1_res._get_task_meta()['groups'] == [g_res.id] - with subtests.test("sig_2_res is stamped", groups=nested_g_res._get_task_meta()['groups']): - assert sig_2_res._get_task_meta()['groups'] == nested_g_res._get_task_meta()['groups'] - - def test_group_stamping_three_levels(self, subtests): - """ - For groups with three levels of nesting, test that group stamps - are saved in the correct order for all nesting levels. - """ - self.app.conf.task_always_eager = True - self.app.conf.task_store_eager_result = True - self.app.conf.result_extended = True - - sig_in_g1_1 = self.add.s(2, 2) - sig_in_g1_2 = self.add.s(1, 1) - sig_in_g2 = self.add.s(2) - sig_in_g2_chain = self.add.s(4) - sig_in_g3_1 = self.add.s(8) - sig_in_g3_2 = self.add.s(16) - - sig_in_g1_1_res = sig_in_g1_1.freeze() - sig_in_g1_2_res = sig_in_g1_2.freeze() - sig_in_g2_res = sig_in_g2.freeze() - sig_in_g2_chain_res = sig_in_g2_chain.freeze() - sig_in_g3_1_res = sig_in_g3_1.freeze() - sig_in_g3_2_res = sig_in_g3_2.freeze() - - g3 = group( - sig_in_g3_1, - sig_in_g3_2, - app=self.app - ) - - g3_res = g3.freeze() - - g2 = group( - sig_in_g2, - chain( - sig_in_g2_chain, - g3 - ), - app=self.app - ) - - g2_res = g2.freeze() - - g1 = group( - sig_in_g1_1, - chain( - sig_in_g1_2, - g2, - app=self.app - ), - app=self.app - ) - - g1_res = g1.freeze() - g1.apply() - - with subtests.test("sig_in_g1_1_res is stamped", groups=[g1_res.id]): - assert sig_in_g1_1_res._get_task_meta()['groups'] == [g1_res.id] - with subtests.test("sig_in_g1_2_res is stamped", groups=[g1_res.id]): - assert sig_in_g1_2_res._get_task_meta()['groups'] == [g1_res.id] - with subtests.test("sig_in_g2_res is stamped", groups=[g1_res.id, g2_res.id]): - assert sorted(sig_in_g2_res._get_task_meta()['groups']) == \ - sorted([g1_res.id, g2_res.id]) - with subtests.test("sig_in_g2_chain_res is stamped", groups=[g1_res.id, g2_res.id]): - assert sorted(sig_in_g2_chain_res._get_task_meta()['groups']) == \ - sorted([g1_res.id, g2_res.id]) - with subtests.test("sig_in_g3_1_res is stamped", groups=[g1_res.id, g2_res.id, g3_res.id]): - assert sorted(sig_in_g3_1_res._get_task_meta()['groups']) == \ - sorted([g1_res.id, g2_res.id, g3_res.id]) - with subtests.test("sig_in_g3_2_res is stamped", groups=[g1_res.id, g2_res.id, g3_res.id]): - assert sorted(sig_in_g3_2_res._get_task_meta()['groups']) == \ - sorted([g1_res.id, g2_res.id, g3_res.id]) - - def test_group_stamping_parallel_groups(self, subtests): - """ - In the case of group within a group that is from another canvas - element, ensure that group stamps are added correctly when groups are - run in parallel. 
- """ - self.app.conf.task_always_eager = True - self.app.conf.task_store_eager_result = True - self.app.conf.result_extended = True - - sig_in_g1 = self.add.s(1, 1) - sig_in_g2_chain = self.add.s(2, 2) - sig_in_g2_1 = self.add.s(4) - sig_in_g2_2 = self.add.s(8) - sig_in_g3_chain = self.add.s(2, 2) - sig_in_g3_1 = self.add.s(4) - sig_in_g3_2 = self.add.s(8) - - sig_in_g1_res = sig_in_g1.freeze(_id='sig_in_g1') - sig_in_g2_chain_res = sig_in_g2_chain.freeze(_id='sig_in_g2_chain') - sig_in_g2_1_res = sig_in_g2_1.freeze(_id='sig_in_g2_1') - sig_in_g2_2_res = sig_in_g2_2.freeze(_id='sig_in_g2_2') - sig_in_g3_chain_res = sig_in_g3_chain.freeze(_id='sig_in_g3_chain') - sig_in_g3_1_res = sig_in_g3_1.freeze(_id='sig_in_g3_1') - sig_in_g3_2_res = sig_in_g3_2.freeze(_id='sig_in_g3_2') - - g3 = group( - sig_in_g3_1, - sig_in_g3_2, - app=self.app - ) - g3_res = g3.freeze(group_id='g3') - - g2 = group( - sig_in_g2_1, - sig_in_g2_2, - app=self.app - ) - g2_res = g2.freeze(group_id='g2') - - g1 = group( - sig_in_g1, - chain( - sig_in_g2_chain, - g2, - app=self.app - ), - chain( - sig_in_g3_chain, - g3, - app=self.app - ), - ) - g1_res = g1.freeze(group_id='g1') - g1.apply() - - with subtests.test("sig_in_g1 is stamped", groups=[g1_res.id]): - assert sig_in_g1_res.id == 'sig_in_g1' - assert sig_in_g1_res._get_task_meta()['groups'] == [g1_res.id] - - with subtests.test("sig_in_g2_chain is stamped", groups=[g1_res.id]): - assert sig_in_g2_chain_res.id == 'sig_in_g2_chain' - assert sig_in_g2_chain_res._get_task_meta()['groups'] == \ - [g1_res.id] - - with subtests.test("sig_in_g2_1 is stamped", groups=[g1_res.id, g2_res.id]): - assert sig_in_g2_1_res.id == 'sig_in_g2_1' - assert sorted(sig_in_g2_1_res._get_task_meta()['groups']) == \ - sorted([g1_res.id, g2_res.id]) - - with subtests.test("sig_in_g2_2 is stamped", - groups=[g1_res.id, g2_res.id]): - assert sig_in_g2_2_res.id == 'sig_in_g2_2' - assert sorted(sig_in_g2_2_res._get_task_meta()['groups']) == \ - sorted([g1_res.id, g2_res.id]) - - with subtests.test("sig_in_g3_chain is stamped", - groups=[g1_res.id]): - assert sig_in_g3_chain_res.id == 'sig_in_g3_chain' - assert sig_in_g3_chain_res._get_task_meta()['groups'] == \ - [g1_res.id] - - with subtests.test("sig_in_g3_1 is stamped", - groups=[g1_res.id, g3_res.id]): - assert sig_in_g3_1_res.id == 'sig_in_g3_1' - assert sorted(sig_in_g3_1_res._get_task_meta()['groups']) == \ - sorted([g1_res.id, g3_res.id]) - - with subtests.test("sig_in_g3_2 is stamped", - groups=[g1_res.id, g3_res.id]): - assert sorted(sig_in_g3_2_res._get_task_meta()['groups']) == \ - sorted([g1_res.id, g3_res.id]) - def test_repr(self): x = group([self.add.s(2, 2), self.add.s(4, 4)]) assert repr(x) @@ -1949,162 +1190,6 @@ def test_group_prepared(self): class test_chord(CanvasCase): - def test_chord_stamping_one_level(self, subtests): - """ - In the case of group within a chord that is from another canvas - element, ensure that chord stamps are added correctly when chord are - run in parallel. 
- """ - self.app.conf.task_always_eager = True - self.app.conf.task_store_eager_result = True - self.app.conf.result_extended = True - - sig_1 = self.add.s(2, 2) - sig_2 = self.add.s(4, 4) - sig_1_res = sig_1.freeze() - sig_2_res = sig_2.freeze() - sig_sum = self.xsum.s() - sig_sum_res = sig_sum.freeze() - - g = chord([sig_1, sig_2], sig_sum, app=self.app) - g.stamp(stamp="stamp") - g.freeze() - g.apply() - - with subtests.test("sig_sum_res body isn't stamped", groups=[]): - assert sig_sum_res._get_task_meta()['groups'] == [] - - with subtests.test("sig_1_res is stamped", groups=[g.id]): - assert sig_1_res._get_task_meta()['groups'] == [g.id] - - with subtests.test("sig_2_res is stamped", groups=[g.id]): - assert sig_2_res._get_task_meta()['groups'] == [g.id] - - with subtests.test("sig_1_res is stamped manually", stamp=["stamp"]): - assert sig_1_res._get_task_meta()['stamp'] == ["stamp"] - - with subtests.test("sig_2_res is stamped manually", stamp=["stamp"]): - assert sig_2_res._get_task_meta()['stamp'] == ["stamp"] - - with subtests.test("sig_1_res has stamped_headers", stamped_headers=["stamp", 'groups']): - assert sorted(sig_1_res._get_task_meta()['stamped_headers']) == sorted(['stamp', 'groups']) - - with subtests.test("sig_2_res has stamped_headers", stamped_headers=["stamp", 'groups']): - assert sorted(sig_2_res._get_task_meta()['stamped_headers']) == sorted(['stamp', 'groups']) - - def test_chord_stamping_two_levels(self, subtests): - """ - For a group within a chord, test that group stamps are stored in - the correct order. - """ - self.app.conf.task_always_eager = True - self.app.conf.task_store_eager_result = True - self.app.conf.result_extended = True - - sig_1 = self.add.s(2, 2) - sig_2 = self.add.s(1, 1) - nested_sig_1 = self.add.s(2) - nested_sig_2 = self.add.s(4) - - sig_1_res = sig_1.freeze() - sig_2_res = sig_2.freeze() - first_nested_sig_res = nested_sig_1.freeze() - second_nested_sig_res = nested_sig_2.freeze() - - g2 = group( - nested_sig_1, - nested_sig_2, - app=self.app - ) - - g2_res = g2.freeze() - - sig_sum = self.xsum.s() - sig_sum.freeze() - - g1 = chord([sig_2, chain(sig_1, g2)], sig_sum, app=self.app) - - g1.freeze() - g1.apply() - - with subtests.test("sig_1_res body is stamped", groups=[g1.id]): - assert sig_1_res._get_task_meta()['groups'] == [g1.id] - with subtests.test("sig_2_res body is stamped", groups=[g1.id]): - assert sig_2_res._get_task_meta()['groups'] == [g1.id] - with subtests.test("first_nested_sig_res body is stamped", groups=[g1.id, g2_res.id]): - assert sorted(first_nested_sig_res._get_task_meta()['groups']) == \ - sorted([g1.id, g2_res.id]) - with subtests.test("second_nested_sig_res body is stamped", groups=[g1.id, g2_res.id]): - assert sorted(second_nested_sig_res._get_task_meta()['groups']) == \ - sorted([g1.id, g2_res.id]) - - def test_chord_stamping_body_group(self, subtests): - """ - In the case of group within a chord that is from another canvas - element, ensure that chord stamps are added correctly when chord are - run in parallel. 
- """ - self.app.conf.task_always_eager = True - self.app.conf.task_store_eager_result = True - self.app.conf.result_extended = True - - tasks = [self.add.s(i, i) for i in range(10)] - - sum_task = self.xsum.s() - sum_task_res = sum_task.freeze() - prod_task = self.xprod.s() - prod_task_res = sum_task.freeze() - - body = group(sum_task, prod_task) - - g = chord(tasks, body, app=self.app) - g.freeze() - g.apply() - - with subtests.test("sum_task_res is stamped", groups=[body.id]): - assert sum_task_res._get_task_meta()['groups'] == [body.id] - with subtests.test("prod_task_res is stamped", groups=[body.id]): - assert prod_task_res._get_task_meta()['groups'] == [body.id] - - def test_chord_stamping_body_chord(self, subtests): - """ - In the case of chord within a chord that is from another canvas - element, ensure that chord stamps are added correctly when chord are - run in parallel. - """ - self.app.conf.task_always_eager = True - self.app.conf.task_store_eager_result = True - self.app.conf.result_extended = True - - parent_header_tasks = group([self.add.s(i, i) for i in range(10)]) - parent_header_tasks_res = parent_header_tasks.freeze() - - sum_task = self.xsum.s() - sum_task_res = sum_task.freeze() - sum_task2 = self.xsum.s() - sum_task_res2 = sum_task2.freeze() - prod_task = self.xprod.s() - prod_task_res = sum_task.freeze() - - body = chord(group(sum_task, prod_task), sum_task2, app=self.app) - - c = chord(parent_header_tasks, body, app=self.app) - c.freeze() - c.apply() - - with subtests.test("parent_header_tasks are stamped", groups=[c.id]): - for ar in parent_header_tasks_res.children: - assert ar._get_task_meta()['groups'] == [c.id] - assert ar._get_task_meta()['groups'] != [body.id] - with subtests.test("sum_task_res is stamped", groups=[body.id]): - assert sum_task_res._get_task_meta()['groups'] == [body.id] - assert sum_task_res._get_task_meta()['groups'] != [c.id] - with subtests.test("prod_task_res is stamped", groups=[body.id]): - assert prod_task_res._get_task_meta()['groups'] == [body.id] - assert prod_task_res._get_task_meta()['groups'] != [c.id] - with subtests.test("sum_task_res2 is NOT stamped", groups=[]): - assert len(sum_task_res2._get_task_meta()['groups']) == 0 - def test__get_app_does_not_exhaust_generator(self): def build_generator(): yield self.add.s(1, 1) diff --git a/t/unit/tasks/test_chord.py b/t/unit/tasks/test_chord.py index 0c3ddf19b0b..e44c0af4b67 100644 --- a/t/unit/tasks/test_chord.py +++ b/t/unit/tasks/test_chord.py @@ -77,7 +77,7 @@ class AlwaysReady(TSR): with self._chord_context(AlwaysReady) as (cb, retry, _): cb.type.apply_async.assert_called_with( - ([2, 4, 8, 6],), {}, task_id=cb.id, stamped_headers=['groups'], groups=[] + ([2, 4, 8, 6],), {}, task_id=cb.id, ) # didn't retry assert not retry.call_count @@ -234,8 +234,6 @@ def mul(x, y): task_id=None, kwargs={}, interval=10, - groups=[ch.tasks.id], - stamped_headers=['groups'] ) def test_unlock_with_chord_params_and_task_id(self): @@ -258,8 +256,6 @@ def mul(x, y): task_id=sentinel.task_id, kwargs={}, interval=10, - groups=[ch.tasks.id], - stamped_headers=['groups'] ) diff --git a/t/unit/tasks/test_stamping.py b/t/unit/tasks/test_stamping.py new file mode 100644 index 00000000000..02f4d54ba28 --- /dev/null +++ b/t/unit/tasks/test_stamping.py @@ -0,0 +1,1229 @@ +import math +import uuid +from collections.abc import Iterable + +import pytest + +from celery import Task +from celery.canvas import Signature, StampingVisitor, _chain, _chord, chain, chord, group, signature +from celery.exceptions import 
Ignore + + +class LinkingVisitor(StampingVisitor): + def on_signature(self, actual_sig: Signature, **headers) -> dict: + link_workflow = chain( + group(signature("task1"), signature("task2")), + signature("task3"), + ) + link = signature(f"{actual_sig.name}_link") | link_workflow.clone() + actual_sig.link(link) + link_error = signature(f"{actual_sig.name}_link_error") | link_workflow.clone() + actual_sig.link_error(link_error) + return super().on_signature(actual_sig, **headers) + + +class CleanupVisitor(StampingVisitor): + def clean_stamps(self, actual_sig: Signature) -> None: + if "stamped_headers" in actual_sig.options and actual_sig.options["stamped_headers"]: + for stamp in actual_sig.options["stamped_headers"]: + if stamp in actual_sig.options: + actual_sig.options.pop(stamp) + + def clean_links(self, actual_sig: Signature) -> None: + if "link" in actual_sig.options: + actual_sig.options.pop("link") + if "link_error" in actual_sig.options: + actual_sig.options.pop("link_error") + + def on_signature(self, actual_sig: Signature, **headers) -> dict: + self.clean_stamps(actual_sig) + self.clean_links(actual_sig) + return super().on_signature(actual_sig, **headers) + + +class BooleanStampingVisitor(StampingVisitor): + def on_signature(self, actual_sig: Signature, **headers) -> dict: + return {"on_signature": True} + + def on_group_start(self, actual_sig: Signature, **headers) -> dict: + return {"on_group_start": True} + + def on_chain_start(self, actual_sig: Signature, **headers) -> dict: + return {"on_chain_start": True} + + def on_chord_header_start(self, actual_sig: Signature, **header) -> dict: + s = super().on_chord_header_start(actual_sig, **header) + s.update({"on_chord_header_start": True}) + return s + + def on_chord_body(self, actual_sig: Signature, **header) -> dict: + return {"on_chord_body": True} + + def on_callback(self, actual_sig: Signature, **header) -> dict: + return {"on_callback": True} + + def on_errback(self, actual_sig: Signature, **header) -> dict: + return {"on_errback": True} + + +class ListStampingVisitor(StampingVisitor): + def on_signature(self, actual_sig: Signature, **headers) -> dict: + return { + "on_signature": ["ListStampingVisitor: on_signature-item1", "ListStampingVisitor: on_signature-item2"] + } + + def on_group_start(self, actual_sig: Signature, **headers) -> dict: + return { + "on_group_start": [ + "ListStampingVisitor: on_group_start-item1", + "ListStampingVisitor: on_group_start-item2", + ] + } + + def on_chain_start(self, actual_sig: Signature, **headers) -> dict: + return { + "on_chain_start": [ + "ListStampingVisitor: on_chain_start-item1", + "ListStampingVisitor: on_chain_start-item2", + ] + } + + def on_chord_header_start(self, actual_sig: Signature, **header) -> dict: + s = super().on_chord_header_start(actual_sig, **header) + s.update( + { + "on_chord_header_start": [ + "ListStampingVisitor: on_chord_header_start-item1", + "ListStampingVisitor: on_chord_header_start-item2", + ] + } + ) + return s + + def on_chord_body(self, actual_sig: Signature, **header) -> dict: + return { + "on_chord_body": ["ListStampingVisitor: on_chord_body-item1", "ListStampingVisitor: on_chord_body-item2"] + } + + def on_callback(self, actual_sig: Signature, **header) -> dict: + return {"on_callback": ["ListStampingVisitor: on_callback-item1", "ListStampingVisitor: on_callback-item2"]} + + def on_errback(self, actual_sig: Signature, **header) -> dict: + return {"on_errback": ["ListStampingVisitor: on_errback-item1", "ListStampingVisitor: on_errback-item2"]} + + 
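# Illustrative sketch (not part of the patch): how fixture visitors such as
# the ones in this module are driven. stamp() walks the whole canvas and
# calls the hook matching each node, merging the returned dict into that
# node's options (``add`` is an assumed bound task):
canvas = chain(add.s(1, 1), add.s(2))
canvas.stamp(visitor=BooleanStampingVisitor())
assert all(task.options["on_signature"] is True for task in canvas.tasks)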
+class SetStampingVisitor(StampingVisitor): + def on_signature(self, actual_sig: Signature, **headers) -> dict: + return { + "on_signature": { + "SetStampingVisitor: on_signature-item1", + "SetStampingVisitor: on_signature-item2", + "SetStampingVisitor: on_signature-item3", + } + } + + def on_group_start(self, actual_sig: Signature, **headers) -> dict: + return { + "on_group_start": { + "SetStampingVisitor: on_group_start-item1", + "SetStampingVisitor: on_group_start-item2", + "SetStampingVisitor: on_group_start-item3", + } + } + + def on_chain_start(self, actual_sig: Signature, **headers) -> dict: + return { + "on_chain_start": { + "SetStampingVisitor: on_chain_start-item1", + "SetStampingVisitor: on_chain_start-item2", + "SetStampingVisitor: on_chain_start-item3", + } + } + + def on_chord_header_start(self, actual_sig: Signature, **header) -> dict: + s = super().on_chord_header_start(actual_sig, **header) + s.update( + { + "on_chord_header_start": { + "SetStampingVisitor: on_chord_header_start-item1", + "SetStampingVisitor: on_chord_header_start-item2", + "SetStampingVisitor: on_chord_header_start-item3", + } + } + ) + return s + + def on_chord_body(self, actual_sig: Signature, **header) -> dict: + return { + "on_chord_body": { + "SetStampingVisitor: on_chord_body-item1", + "SetStampingVisitor: on_chord_body-item2", + "SetStampingVisitor: on_chord_body-item3", + } + } + + def on_callback(self, actual_sig: Signature, **header) -> dict: + return { + "on_callback": { + "SetStampingVisitor: on_callback-item1", + "SetStampingVisitor: on_callback-item2", + "SetStampingVisitor: on_callback-item3", + } + } + + def on_errback(self, actual_sig: Signature, **header) -> dict: + return { + "on_errback": { + "SetStampingVisitor: on_errback-item1", + "SetStampingVisitor: on_errback-item2", + "SetStampingVisitor: on_errback-item3", + } + } + + +class StringStampingVisitor(StampingVisitor): + def on_signature(self, actual_sig: Signature, **headers) -> dict: + return {"on_signature": "StringStampingVisitor: on_signature-item1"} + + def on_group_start(self, actual_sig: Signature, **headers) -> dict: + return {"on_group_start": "StringStampingVisitor: on_group_start-item1"} + + def on_chain_start(self, actual_sig: Signature, **headers) -> dict: + return {"on_chain_start": "StringStampingVisitor: on_chain_start-item1"} + + def on_chord_header_start(self, actual_sig: Signature, **header) -> dict: + s = super().on_chord_header_start(actual_sig, **header) + s.update({"on_chord_header_start": "StringStampingVisitor: on_chord_header_start-item1"}) + return s + + def on_chord_body(self, actual_sig: Signature, **header) -> dict: + return {"on_chord_body": "StringStampingVisitor: on_chord_body-item1"} + + def on_callback(self, actual_sig: Signature, **header) -> dict: + return {"on_callback": "StringStampingVisitor: on_callback-item1"} + + def on_errback(self, actual_sig: Signature, **header) -> dict: + return {"on_errback": "StringStampingVisitor: on_errback-item1"} + + +class UUIDStampingVisitor(StampingVisitor): + frozen_uuid = str(uuid.uuid4()) + + def on_signature(self, actual_sig: Signature, **headers) -> dict: + return {"on_signature": UUIDStampingVisitor.frozen_uuid} + + def on_group_start(self, actual_sig: Signature, **headers) -> dict: + return {"on_group_start": UUIDStampingVisitor.frozen_uuid} + + def on_chain_start(self, actual_sig: Signature, **headers) -> dict: + return {"on_chain_start": UUIDStampingVisitor.frozen_uuid} + + def on_chord_header_start(self, actual_sig: Signature, **header) -> dict: + s 
= super().on_chord_header_start(actual_sig, **header) + s.update({"on_chord_header_start": UUIDStampingVisitor.frozen_uuid}) + return s + + def on_chord_body(self, actual_sig: Signature, **header) -> dict: + return {"on_chord_body": UUIDStampingVisitor.frozen_uuid} + + def on_callback(self, actual_sig: Signature, **header) -> dict: + return {"on_callback": UUIDStampingVisitor.frozen_uuid} + + def on_errback(self, actual_sig: Signature, **header) -> dict: + return {"on_errback": UUIDStampingVisitor.frozen_uuid} + + +class StampsAssertionVisitor(StampingVisitor): + """ + The canvas stamping mechanism traverses the canvas automatically, so we can ride + it to traverse the canvas recursively and assert that all signatures have the correct stamp in options + """ + + def __init__(self, visitor: StampingVisitor, subtests): + self.visitor = visitor + self.subtests = subtests + + def assertion_check(self, actual_sig: Signature, method: str, **headers) -> None: + if any( + [ + isinstance(actual_sig, group), + isinstance(actual_sig, _chain), + isinstance(actual_sig, _chord), + ] + ): + return + + expected_stamp = getattr(self.visitor, method)(actual_sig, **headers)[method] + actual_stamp = actual_sig.options[method] + with self.subtests.test(f"Check if {actual_sig} has stamp: {expected_stamp}"): + if isinstance(self.visitor, ListStampingVisitor) or isinstance(self.visitor, SetStampingVisitor): + assertion_check = all([actual in expected_stamp for actual in actual_stamp]) + else: + assertion_check = actual_stamp == expected_stamp + assertion_error = f"{actual_sig} has stamp {actual_stamp} instead of: {expected_stamp}" + assert assertion_check, assertion_error + + def on_signature(self, actual_sig: Signature, **headers) -> dict: + self.assertion_check(actual_sig, "on_signature", **headers) + return super().on_signature(actual_sig, **headers) + + def on_group_start(self, actual_sig: Signature, **headers) -> dict: + self.assertion_check(actual_sig, "on_group_start", **headers) + return super().on_group_start(actual_sig, **headers) + + def on_chain_start(self, actual_sig: Signature, **headers) -> dict: + self.assertion_check(actual_sig, "on_chain_start", **headers) + return super().on_chain_start(actual_sig, **headers) + + def on_chord_header_start(self, actual_sig: Signature, **header) -> dict: + self.assertion_check(actual_sig, "on_chord_header_start", **header) + if issubclass(type(actual_sig.tasks), Signature): + self.assertion_check(actual_sig.tasks, "on_chord_header_start", **header) + return super().on_chord_header_start(actual_sig, **header) + + def on_chord_body(self, actual_sig: chord, **header) -> dict: + self.assertion_check(actual_sig.body, "on_chord_body", **header) + return super().on_chord_body(actual_sig, **header) + + def on_callback(self, actual_link_sig: Signature, **header) -> dict: + self.assertion_check(actual_link_sig, "on_callback", **header) + return super().on_callback(actual_link_sig, **header) + + def on_errback(self, actual_linkerr_sig: Signature, **header) -> dict: + self.assertion_check(actual_linkerr_sig, "on_errback", **header) + return super().on_errback(actual_linkerr_sig, **header) + + +class StampedHeadersAssertionVisitor(StampingVisitor): + """ + The canvas stamping mechanism traverses the canvas automatically, so we can ride + it to traverse the canvas recursively and assert that all signatures have the correct + stamp in options["stamped_headers"] + """ + + def __init__(self, visitor: StampingVisitor, subtests): + self.visitor = visitor + self.subtests = subtests + 
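+    # assertion_check() below inspects only leaf signatures: the composite
+    # primitives (group/chain/chord) must not carry "stamped_headers" in their
+    # options, because the stamps live on the individual tasks they contain.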
+ def assertion_check(self, actual_sig: Signature, expected_stamped_header: str) -> None: + if any( + [ + isinstance(actual_sig, group), + isinstance(actual_sig, _chain), + isinstance(actual_sig, _chord), + ] + ): + with self.subtests.test(f'Check if "stamped_headers" is not in {actual_sig.options}'): + assertion_check = "stamped_headers" not in actual_sig.options + assertion_error = f"{actual_sig} should not have stamped_headers in options" + assert assertion_check, assertion_error + return + + actual_stamped_headers = actual_sig.options["stamped_headers"] + with self.subtests.test(f'Check if {actual_sig}["stamped_headers"] has: {expected_stamped_header}'): + assertion_check = expected_stamped_header in actual_stamped_headers + assertion_error = ( + f'{actual_sig}["stamped_headers"] {actual_stamped_headers} does ' + f"not contain {expected_stamped_header}" + ) + assert assertion_check, assertion_error + + def on_signature(self, actual_sig: Signature, **headers) -> dict: + self.assertion_check(actual_sig, "on_signature") + return super().on_signature(actual_sig, **headers) + + def on_group_start(self, actual_sig: Signature, **headers) -> dict: + self.assertion_check(actual_sig, "on_group_start") + return super().on_group_start(actual_sig, **headers) + + def on_chain_start(self, actual_sig: Signature, **headers) -> dict: + self.assertion_check(actual_sig, "on_chain_start") + return super().on_chain_start(actual_sig, **headers) + + def on_chord_header_start(self, actual_sig: Signature, **header) -> dict: + self.assertion_check(actual_sig, "on_chord_header_start") + if issubclass(type(actual_sig.tasks), Signature): + self.assertion_check(actual_sig.tasks, "on_chord_header_start") + return super().on_chord_header_start(actual_sig, **header) + + def on_chord_body(self, actual_sig: chord, **header) -> dict: + self.assertion_check(actual_sig.body, "on_chord_body") + return super().on_chord_body(actual_sig, **header) + + def on_callback(self, actual_link_sig: Signature, **header) -> dict: + self.assertion_check(actual_link_sig, "on_callback") + return super().on_callback(actual_link_sig, **header) + + def on_errback(self, actual_linkerr_sig: Signature, **header) -> dict: + self.assertion_check(actual_linkerr_sig, "on_errback") + return super().on_errback(actual_linkerr_sig, **header) + + +def return_True(*args, **kwargs): + return True + + +class CanvasCase: + def setup_method(self): + @self.app.task(shared=False) + def add(x, y): + return x + y + + self.add = add + + @self.app.task(shared=False) + def mul(x, y): + return x * y + + self.mul = mul + + @self.app.task(shared=False) + def div(x, y): + return x / y + + self.div = div + + @self.app.task(shared=False) + def xsum(numbers): + return sum(sum(num) if isinstance(num, Iterable) else num for num in numbers) + + self.xsum = xsum + + @self.app.task(shared=False, bind=True) + def replaced(self, x, y): + return self.replace(add.si(x, y)) + + self.replaced = replaced + + @self.app.task(shared=False, bind=True) + def replaced_group(self, x, y): + return self.replace(group(add.si(x, y), mul.si(x, y))) + + self.replaced_group = replaced_group + + @self.app.task(shared=False, bind=True) + def replace_with_group(self, x, y): + return self.replace(group(add.si(x, y), mul.si(x, y))) + + self.replace_with_group = replace_with_group + + @self.app.task(shared=False, bind=True) + def replace_with_chain(self, x, y): + return self.replace(group(add.si(x, y) | mul.s(y), add.si(x, y))) + + self.replace_with_chain = replace_with_chain + + 
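+        # math.prod() only exists on Python 3.8+; xprod below falls back to
+        # functools.reduce() with operator.mul on older interpreters.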
@self.app.task(shared=False) + def xprod(numbers): + try: + return math.prod(numbers) + except AttributeError: + # TODO: Drop this backport once + # we drop support for Python 3.7 + import operator + from functools import reduce + + return reduce(operator.mul, numbers) + + self.xprod = xprod + + +@pytest.mark.parametrize( + "stamping_visitor", + [ + BooleanStampingVisitor(), + ListStampingVisitor(), + SetStampingVisitor(), + StringStampingVisitor(), + UUIDStampingVisitor(), + ], +) +@pytest.mark.parametrize( + "canvas_workflow", + [ + signature("sig"), + group(signature("sig")), + group(signature("sig1", signature("sig2"))), + group(signature(f"sig{i}") for i in range(2)), + chord((signature(f"sig{i}") for i in range(2)), signature("sig3")), + chord(group(signature(f"sig{i}") for i in range(2)), signature("sig3")), + chord(group(signature(f"sig{i}") for i in range(2)), signature("sig3") | signature("sig4")), + chord(signature("sig1"), signature("sig2") | signature("sig3")), + chain( + signature("sig"), + chord((signature(f"sig{i}") for i in range(2)), signature("sig3")), + chord(group(signature(f"sig{i}") for i in range(2)), signature("sig3")), + chord(group(signature(f"sig{i}") for i in range(2)), signature("sig3") | signature("sig4")), + chord(signature("sig1"), signature("sig2") | signature("sig3")), + ), + chain( + signature("sig1") | signature("sig2"), + group(signature("sig3"), signature("sig4")) | group(signature(f"sig{i}") for i in range(5, 6)), + chord(group(signature(f"sig{i}") for i in range(6, 8)), signature("sig8")) | signature("sig9"), + ), + chain( + signature("sig"), + chord( + group(signature(f"sig{i}") for i in range(2)), + chain( + signature("sig3"), + chord( + (signature(f"sig{i}") for i in range(4, 6)), + chain( + signature("sig6"), + chord( + group(signature(f"sig{i}") for i in range(7, 9)), + chain( + signature("sig9"), + chord(group(signature("sig10"), signature("sig11")), signature("sig12")), + ), + ), + ), + ), + ), + ), + ), + group( + signature("sig"), + group(signature("sig1")), + group(signature("sig1"), signature("sig2")), + group(signature(f"sig{i}") for i in range(2)), + group([signature("sig1"), signature("sig2")]), + group((signature("sig1"), signature("sig2"))), + chain(signature("sig1"), signature("sig2")), + chord(group(signature("sig1"), signature("sig2")), signature("sig3")), + chord(group(signature(f"sig{i}") for i in range(2)), group(signature("sig3"), signature("sig4"))), + chain( + group(signature("sig1"), signature("sig2")), + group(signature("sig3"), signature("sig4")), + signature("sig5"), + ), + chain( + signature("sig1"), + group(signature("sig2"), signature("sig3")), + group(signature("sig4"), signature("sig5")), + ), + chain( + group( + signature("sig1"), + group(signature("sig2")), + group([signature("sig3"), signature("sig4")]), + group(signature(f"sig{i}") for i in range(5, 7)), + ), + chain( + signature("sig8"), + group(signature("sig9"), signature("sig10")), + ), + ), + ), + chain( + signature("sig"), + group(signature("sig1")), + group(signature("sig1"), signature("sig2")), + group(signature(f"sig{i}") for i in range(2)), + group([signature("sig1"), signature("sig2")]), + group((signature("sig1"), signature("sig2"))), + chain(signature("sig1"), signature("sig2")), + chord(group(signature("sig1"), signature("sig2")), signature("sig3")), + chord(group(signature(f"sig{i}") for i in range(2)), group(signature("sig3"), signature("sig4"))), + chain( + group(signature("sig1"), signature("sig2")), + group(signature("sig3"), 
signature("sig4")), + signature("sig5"), + ), + chain( + signature("sig1"), + group(signature("sig2"), signature("sig3")), + group(signature("sig4"), signature("sig5")), + ), + chain( + group( + signature("sig1"), + group(signature("sig2")), + group([signature("sig3"), signature("sig4")]), + group(signature(f"sig{i}") for i in range(5, 7)), + ), + chain( + signature("sig8"), + group(signature("sig9"), signature("sig10")), + ), + ), + ), + chord( + group( + group(signature(f"sig{i}") for i in range(2)), + group(signature(f"sig{i}") for i in range(2, 4)), + group(signature(f"sig{i}") for i in range(4, 6)), + group(signature(f"sig{i}") for i in range(6, 8)), + ), + chain( + chain( + signature("sig8") | signature("sig9"), + group(signature("sig10"), signature("sig11")) + | group(signature(f"sig{i}") for i in range(12, 14)), + chord(group(signature(f"sig{i}") for i in range(14, 16)), signature("sig16")) + | signature("sig17"), + ), + signature("sig1") | signature("sig2"), + group(signature("sig3"), signature("sig4")) | group(signature(f"sig{i}") for i in range(5, 7)), + chord(group(signature(f"sig{i}") for i in range(7, 9)), signature("sig9")) | signature("sig10"), + ), + ), + ], +) +class test_canvas_stamping(CanvasCase): + @pytest.fixture + def stamped_canvas(self, stamping_visitor: StampingVisitor, canvas_workflow: Signature) -> Signature: + workflow = canvas_workflow.clone() + workflow.stamp(CleanupVisitor()) + workflow.stamp(stamping_visitor) + return workflow + + @pytest.fixture + def stamped_linked_canvas(self, stamping_visitor: StampingVisitor, canvas_workflow: Signature) -> Signature: + workflow = canvas_workflow.clone() + workflow.stamp(CleanupVisitor()) + workflow.stamp(LinkingVisitor()) + workflow.stamp(stamping_visitor) + return workflow + + @pytest.fixture(params=["stamped_canvas", "stamped_linked_canvas"]) + def workflow(self, request, canvas_workflow: Signature) -> Signature: + return request.getfixturevalue(request.param) + + @pytest.mark.usefixtures("depends_on_current_app") + def test_stamp_in_options(self, workflow: Signature, stamping_visitor: StampingVisitor, subtests): + """Test that all canvas signatures gets the stamp in options""" + workflow.stamp(StampsAssertionVisitor(stamping_visitor, subtests)) + + @pytest.mark.usefixtures("depends_on_current_app") + def test_stamping_headers_in_options(self, workflow: Signature, stamping_visitor: StampingVisitor, subtests): + """Test that all canvas signatures gets the stamp in options["stamped_headers"]""" + workflow.stamp(StampedHeadersAssertionVisitor(stamping_visitor, subtests)) + + @pytest.mark.usefixtures("depends_on_current_app") + def test_stamping_with_replace(self, workflow: Signature, stamping_visitor: StampingVisitor, subtests): + self.app.conf.task_always_eager = True + self.app.conf.task_store_eager_result = True + self.app.conf.result_extended = True + + class AssertionTask(Task): + def on_stamp_replaced(self, sig: Signature, visitor=None): + return super().on_stamp_replaced(sig, visitor=stamping_visitor) + + def on_replace(self, sig: Signature): + nonlocal assertion_result + sig.stamp(StampsAssertionVisitor(stamping_visitor, subtests)) + sig.stamp(StampedHeadersAssertionVisitor(stamping_visitor, subtests)) + assertion_result = True + return super().on_replace(sig) + + @self.app.task(shared=False, bind=True, base=AssertionTask) + def assert_using_replace(self: AssertionTask): + assert self.request.stamped_headers is not None, "stamped_headers should be set" + assert self.request.stamps is not None, "stamps should be 
set" + return self.replace(workflow) + + @self.app.task(shared=False, bind=True) + def stamp_using_replace(self: Task): + return self.replace(assert_using_replace.s(), visitor=stamping_visitor) + + replaced_sig = stamp_using_replace.s() + assertion_result = False + replaced_sig.apply() + assert assertion_result + + +class test_stamping_mechanism(CanvasCase): + """These tests were extracted (and fixed) from the canvas unit tests.""" + + def test_on_signature_gets_the_signature(self): + expected_sig = self.add.s(4, 2) + + class CustomStampingVisitor(StampingVisitor): + def on_signature(self, actual_sig, **headers) -> dict: + nonlocal expected_sig + assert actual_sig == expected_sig + return {"header": "value"} + + sig = expected_sig.clone() + sig.stamp(CustomStampingVisitor()) + assert sig.options["header"] == "value" + + def test_double_stamping(self, subtests): + """ + Test manual signature stamping with two different stamps. + """ + self.app.conf.task_always_eager = True + self.app.conf.task_store_eager_result = True + self.app.conf.result_extended = True + + sig_1 = self.add.s(2, 2) + sig_1.stamp(stamp1="stamp1") + sig_1.stamp(stamp2="stamp2") + sig_1_res = sig_1.freeze() + sig_1.apply() + + with subtests.test("sig_1_res is stamped with stamp1", stamp1=["stamp1"]): + assert sig_1_res._get_task_meta()["stamp1"] == ["stamp1"] + + with subtests.test("sig_1_res is stamped with stamp2", stamp2=["stamp2"]): + assert sig_1_res._get_task_meta()["stamp2"] == ["stamp2"] + + with subtests.test("sig_1_res is stamped twice", stamped_headers=["stamp2", "stamp1"]): + assert sorted(sig_1_res._get_task_meta()["stamped_headers"]) == sorted(["stamp2", "stamp1"]) + + def test_twice_stamping(self, subtests): + """ + Test manual signature stamping with two stamps twice. + """ + self.app.conf.task_always_eager = True + self.app.conf.task_store_eager_result = True + self.app.conf.result_extended = True + + sig_1 = self.add.s(2, 2) + sig_1.stamp(stamp1="stamp1") + sig_1.stamp(stamp2="stamp") + sig_1.stamp(stamp2="stamp2") + sig_1.stamp(stamp3=["stamp3"]) + sig_1_res = sig_1.freeze() + sig_1.apply() + + with subtests.test("sig_1_res is stamped twice", stamps=["stamp2", "stamp1"]): + assert sorted(sig_1_res._get_task_meta()["stamp1"]) == ["stamp1"] + assert sorted(sig_1_res._get_task_meta()["stamp2"]) == ["stamp2"] + assert sorted(sig_1_res._get_task_meta()["stamp3"]) == ["stamp3"] + + with subtests.test("sig_1_res is stamped twice", stamped_headers=["stamp2", "stamp1"]): + assert sorted(sig_1_res._get_task_meta()["stamped_headers"]) == sorted(["stamp1", "stamp2", "stamp3"]) + + def test_manual_stamping(self): + """ + Test manual signature stamping. + """ + self.app.conf.task_always_eager = True + self.app.conf.task_store_eager_result = True + self.app.conf.result_extended = True + + sig_1 = self.add.s(2, 2) + stamps = ["stamp1", "stamp2"] + sig_1.stamp(visitor=None, groups=[stamps[1]]) + sig_1.stamp(visitor=None, groups=stamps[0]) + sig_1_res = sig_1.freeze() + sig_1.apply() + assert sorted(sig_1_res._get_task_meta()["groups"]) == [stamps[0]] + + def test_custom_stamping_visitor(self, subtests): + """ + Test manual signature stamping with a custom visitor class. 
+ """ + self.app.conf.task_always_eager = True + self.app.conf.task_store_eager_result = True + self.app.conf.result_extended = True + + class CustomStampingVisitor1(StampingVisitor): + def on_signature(self, sig, **headers) -> dict: + # without using stamped_headers key explicitly + # the key will be calculated from the headers implicitly + return {"header": "value"} + + class CustomStampingVisitor2(StampingVisitor): + def on_signature(self, sig, **headers) -> dict: + return {"header": "value", "stamped_headers": ["header"]} + + sig_1 = self.add.s(2, 2) + sig_1.stamp(visitor=CustomStampingVisitor1()) + sig_1_res = sig_1.freeze() + sig_1.apply() + sig_2 = self.add.s(2, 2) + sig_2.stamp(visitor=CustomStampingVisitor2()) + sig_2_res = sig_2.freeze() + sig_2.apply() + + with subtests.test("sig_1 is stamped with custom visitor", stamped_headers=["header"]): + assert sorted(sig_1_res._get_task_meta()["stamped_headers"]) == sorted(["header"]) + + with subtests.test("sig_2 is stamped with custom visitor", stamped_headers=["header"]): + assert sorted(sig_2_res._get_task_meta()["stamped_headers"]) == sorted(["header"]) + + with subtests.test("sig_1 is stamped with custom visitor", header=["value"]): + assert sig_1_res._get_task_meta()["header"] == ["value"] + + with subtests.test("sig_2 is stamped with custom visitor", header=["value"]): + assert sig_2_res._get_task_meta()["header"] == ["value"] + + def test_callback_stamping(self, subtests): + self.app.conf.task_always_eager = True + self.app.conf.task_store_eager_result = True + self.app.conf.result_extended = True + + class CustomStampingVisitor(StampingVisitor): + def on_signature(self, sig, **headers) -> dict: + return {"header": "value"} + + def on_callback(self, callback, **header) -> dict: + return {"on_callback": True} + + def on_errback(self, errback, **header) -> dict: + return {"on_errback": True} + + sig_1 = self.add.s(0, 1) + sig_1_res = sig_1.freeze() + group_sig = group([self.add.s(3), self.add.s(4)]) + group_sig_res = group_sig.freeze() + chord_sig = chord([self.xsum.s(), self.xsum.s()], self.xsum.s()) + chord_sig_res = chord_sig.freeze() + sig_2 = self.add.s(2) + sig_2_res = sig_2.freeze() + chain_sig = chain( + sig_1, # --> 1 + group_sig, # --> [1+3, 1+4] --> [4, 5] + chord_sig, # --> [4+5, 4+5] --> [9, 9] --> 9+9 --> 18 + sig_2, # --> 18 + 2 --> 20 + ) + callback = signature("callback_task") + errback = signature("errback_task") + chain_sig.link(callback) + chain_sig.link_error(errback) + chain_sig.stamp(visitor=CustomStampingVisitor()) + chain_sig_res = chain_sig.apply_async() + chain_sig_res.get() + + with subtests.test("Confirm the chain was executed correctly", result=20): + # Before we run our assertions, let's confirm the base functionality of the chain is working + # as expected including the links stamping. 
+ assert chain_sig_res.result == 20 + + with subtests.test("sig_1 is stamped with custom visitor", stamped_headers=["header"]): + assert sorted(sig_1_res._get_task_meta()["stamped_headers"]) == sorted(["header"]) + + with subtests.test("group_sig is stamped with custom visitor", stamped_headers=["header"]): + for result in group_sig_res.results: + assert sorted(result._get_task_meta()["stamped_headers"]) == sorted(["header"]) + + with subtests.test("chord_sig is stamped with custom visitor", stamped_headers=["header"]): + assert sorted(chord_sig_res._get_task_meta()["stamped_headers"]) == sorted(["header"]) + + with subtests.test("sig_2 is stamped with custom visitor", stamped_headers=["header"]): + assert sorted(sig_2_res._get_task_meta()["stamped_headers"]) == sorted(["header"]) + + with subtests.test( + "callback is stamped with custom visitor", + stamped_headers=["header", "on_callback"], + ): + callback_link = chain_sig.options["link"][0] + headers = callback_link.options + stamped_headers = headers["stamped_headers"] + assert sorted(stamped_headers) == sorted(["header", "on_callback"]) + assert headers["on_callback"] is True + assert headers["header"] == "value" + + with subtests.test( + "errback is stamped with custom visitor", + stamped_headers=["header", "on_errback"], + ): + errback_link = chain_sig.options["link_error"][0] + headers = errback_link.options + stamped_headers = headers["stamped_headers"] + assert sorted(stamped_headers) == sorted(["header", "on_errback"]) + assert headers["on_errback"] is True + assert headers["header"] == "value" + + def test_callback_stamping_link_after_stamp(self, subtests): + self.app.conf.task_always_eager = True + self.app.conf.task_store_eager_result = True + self.app.conf.result_extended = True + + class CustomStampingVisitor(StampingVisitor): + def on_signature(self, sig, **headers) -> dict: + return {"header": "value"} + + def on_callback(self, callback, **header) -> dict: + return {"on_callback": True} + + def on_errback(self, errback, **header) -> dict: + return {"on_errback": True} + + sig_1 = self.add.s(0, 1) + sig_1_res = sig_1.freeze() + group_sig = group([self.add.s(3), self.add.s(4)]) + group_sig_res = group_sig.freeze() + chord_sig = chord([self.xsum.s(), self.xsum.s()], self.xsum.s()) + chord_sig_res = chord_sig.freeze() + sig_2 = self.add.s(2) + sig_2_res = sig_2.freeze() + chain_sig = chain( + sig_1, # --> 1 + group_sig, # --> [1+3, 1+4] --> [4, 5] + chord_sig, # --> [4+5, 4+5] --> [9, 9] --> 9+9 --> 18 + sig_2, # --> 18 + 2 --> 20 + ) + callback = signature("callback_task") + errback = signature("errback_task") + chain_sig.stamp(visitor=CustomStampingVisitor()) + chain_sig.link(callback) + chain_sig.link_error(errback) + chain_sig_res = chain_sig.apply_async() + chain_sig_res.get() + + with subtests.test("Confirm the chain was executed correctly", result=20): + # Before we run our assertions, let's confirm the base functionality of the chain is working + # as expected including the links stamping. 
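+            # Unlike test_callback_stamping above, the links here were attached
+            # *after* stamping, so they are expected to remain unstamped.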
+ assert chain_sig_res.result == 20 + + with subtests.test("sig_1 is stamped with custom visitor", stamped_headers=["header"]): + assert sorted(sig_1_res._get_task_meta()["stamped_headers"]) == sorted(["header"]) + + with subtests.test("group_sig is stamped with custom visitor", stamped_headers=["header"]): + for result in group_sig_res.results: + assert sorted(result._get_task_meta()["stamped_headers"]) == sorted(["header"]) + + with subtests.test("chord_sig is stamped with custom visitor", stamped_headers=["header"]): + assert sorted(chord_sig_res._get_task_meta()["stamped_headers"]) == sorted(["header"]) + + with subtests.test("sig_2 is stamped with custom visitor", stamped_headers=["header"]): + assert sorted(sig_2_res._get_task_meta()["stamped_headers"]) == sorted(["header"]) + + with subtests.test("callback is not stamped"): + callback_link = chain_sig.options["link"][0] + headers = callback_link.options + stamped_headers = headers.get("stamped_headers", []) + assert "on_callback" not in stamped_headers, "Linking after stamping should not stamp the callback" + assert stamped_headers == [] + + with subtests.test("errback is not stamped"): + errback_link = chain_sig.options["link_error"][0] + headers = errback_link.options + stamped_headers = headers.get("stamped_headers", []) + assert "on_callback" not in stamped_headers, "Linking after stamping should not stamp the errback" + assert stamped_headers == [] + + def test_callback_stamping_link_multiple_visitors(self, subtests): + self.app.conf.task_always_eager = True + self.app.conf.task_store_eager_result = True + self.app.conf.result_extended = True + + class CustomStampingVisitor(StampingVisitor): + def on_signature(self, sig, **headers) -> dict: + return {"header": "value"} + + def on_callback(self, callback, **header) -> dict: + return {"on_callback": True} + + def on_errback(self, errback, **header) -> dict: + return {"on_errback": True} + + class CustomStampingVisitor2(StampingVisitor): + def on_signature(self, sig, **headers) -> dict: + return {"header2": "value2"} + + def on_callback(self, callback, **header) -> dict: + return {"on_callback2": "True"} + + def on_errback(self, errback, **header) -> dict: + return {"on_errback2": "True"} + + sig_1 = self.add.s(0, 1) + sig_1_res = sig_1.freeze() + group_sig = group([self.add.s(3), self.add.s(4)]) + group_sig_res = group_sig.freeze() + chord_sig = chord([self.xsum.s(), self.xsum.s()], self.xsum.s()) + chord_sig_res = chord_sig.freeze() + sig_2 = self.add.s(2) + sig_2_res = sig_2.freeze() + chain_sig = chain( + sig_1, # --> 1 + group_sig, # --> [1+3, 1+4] --> [4, 5] + chord_sig, # --> [4+5, 4+5] --> [9, 9] --> 9+9 --> 18 + sig_2, # --> 18 + 2 --> 20 + ) + callback = signature("callback_task") + errback = signature("errback_task") + chain_sig.stamp(visitor=CustomStampingVisitor()) + chain_sig.link(callback) + chain_sig.link_error(errback) + chain_sig.stamp(visitor=CustomStampingVisitor2()) + chain_sig_res = chain_sig.apply_async() + chain_sig_res.get() + + with subtests.test("Confirm the chain was executed correctly", result=20): + # Before we run our assertions, let's confirm the base functionality of the chain is working + # as expected including the links stamping. 
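+            # A second visitor ran after linking, so the links are expected to
+            # carry only the stamps added by CustomStampingVisitor2.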
+ assert chain_sig_res.result == 20 + + with subtests.test("sig_1 is stamped with custom visitor", stamped_headers=["header", "header2"]): + assert sorted(sig_1_res._get_task_meta()["stamped_headers"]) == sorted(["header", "header2"]) + + with subtests.test("group_sig is stamped with custom visitor", stamped_headers=["header", "header2"]): + for result in group_sig_res.results: + assert sorted(result._get_task_meta()["stamped_headers"]) == sorted(["header", "header2"]) + + with subtests.test("chord_sig is stamped with custom visitor", stamped_headers=["header", "header2"]): + assert sorted(chord_sig_res._get_task_meta()["stamped_headers"]) == sorted(["header", "header2"]) + + with subtests.test("sig_2 is stamped with custom visitor", stamped_headers=["header", "header2"]): + assert sorted(sig_2_res._get_task_meta()["stamped_headers"]) == sorted(["header", "header2"]) + + with subtests.test("callback is stamped"): + callback_link = chain_sig.options["link"][0] + headers = callback_link.options + stamped_headers = headers.get("stamped_headers", []) + assert "on_callback2" in stamped_headers, "Linking after stamping should stamp the callback" + expected_stamped_headers = list(CustomStampingVisitor2().on_signature(None).keys()) + expected_stamped_headers.extend(list(CustomStampingVisitor2().on_callback(None).keys())) + assert sorted(stamped_headers) == sorted(expected_stamped_headers) + + with subtests.test("errback is stamped"): + errback_link = chain_sig.options["link_error"][0] + headers = errback_link.options + stamped_headers = headers.get("stamped_headers", []) + assert "on_errback2" in stamped_headers, "Linking after stamping should stamp the errback" + expected_stamped_headers = list(CustomStampingVisitor2().on_signature(None).keys()) + expected_stamped_headers.extend(list(CustomStampingVisitor2().on_errback(None).keys())) + assert sorted(stamped_headers) == sorted(expected_stamped_headers) + + @pytest.mark.usefixtures("depends_on_current_app") + def test_callback_stamping_on_replace(self, subtests): + class CustomStampingVisitor(StampingVisitor): + def on_signature(self, sig, **headers) -> dict: + return {"header": "value"} + + def on_callback(self, callback, **header) -> dict: + return {"on_callback": True} + + def on_errback(self, errback, **header) -> dict: + return {"on_errback": True} + + class MyTask(Task): + def on_replace(self, sig): + sig.stamp(CustomStampingVisitor()) + return super().on_replace(sig) + + mytask = self.app.task(shared=False, base=MyTask)(return_True) + + sig1 = signature("sig1") + callback = signature("callback_task") + errback = signature("errback_task") + sig1.link(callback) + sig1.link_error(errback) + + with subtests.test("callback is not stamped with custom visitor yet"): + callback_link = sig1.options["link"][0] + headers = callback_link.options + assert "on_callback" not in headers + assert "header" not in headers + + with subtests.test("errback is not stamped with custom visitor yet"): + errback_link = sig1.options["link_error"][0] + headers = errback_link.options + assert "on_errback" not in headers + assert "header" not in headers + + with pytest.raises(Ignore): + mytask.replace(sig1) + + with subtests.test( + "callback is stamped with custom visitor", + stamped_headers=["header", "on_callback"], + ): + callback_link = sig1.options["link"][0] + headers = callback_link.options + stamped_headers = headers["stamped_headers"] + assert sorted(stamped_headers) == sorted(["header", "on_callback"]) + assert headers["on_callback"] is True + assert 
headers["header"] == "value" + + with subtests.test( + "errback is stamped with custom visitor", + stamped_headers=["header", "on_errback"], + ): + errback_link = sig1.options["link_error"][0] + headers = errback_link.options + stamped_headers = headers["stamped_headers"] + assert sorted(stamped_headers) == sorted(["header", "on_errback"]) + assert headers["on_errback"] is True + assert headers["header"] == "value" + + @pytest.mark.parametrize( + "sig_to_replace", + [ + group(signature(f"sig{i}") for i in range(2)), + group([signature("sig1"), signature("sig2")]), + group((signature("sig1"), signature("sig2"))), + group(signature("sig1"), signature("sig2")), + chain(signature("sig1"), signature("sig2")), + ], + ) + @pytest.mark.usefixtures("depends_on_current_app") + def test_replacing_stamped_canvas_with_tasks(self, subtests, sig_to_replace): + class CustomStampingVisitor(StampingVisitor): + def on_signature(self, sig, **headers) -> dict: + return {"header": "value"} + + class MyTask(Task): + def on_replace(self, sig): + nonlocal assertion_result + nonlocal failed_task + tasks = sig.tasks.tasks if isinstance(sig.tasks, group) else sig.tasks + assertion_result = len(tasks) == 2 + for task in tasks: + assertion_result = all( + [ + assertion_result, + "header" in task.options["stamped_headers"], + all([header in task.options for header in task.options["stamped_headers"]]), + ] + ) + if not assertion_result: + failed_task = task + break + + return super().on_replace(sig) + + @self.app.task(shared=False, bind=True, base=MyTask) + def replace_from_MyTask(self): + # Allows easy assertion for the test without using Mock + return self.replace(sig_to_replace) + + sig = replace_from_MyTask.s() + sig.stamp(CustomStampingVisitor()) + assertion_result = False + failed_task = None + sig.apply() + assert assertion_result, f"Task {failed_task} was not stamped correctly" + + @pytest.mark.usefixtures("depends_on_current_app") + def test_replacing_stamped_canvas_with_tasks_with_links(self): + class CustomStampingVisitor(StampingVisitor): + def on_signature(self, sig, **headers) -> dict: + return {"header": "value"} + + class MyTask(Task): + def on_replace(self, sig): + nonlocal assertion_result + nonlocal failed_task + nonlocal failed_task_link + tasks = sig.tasks.tasks if isinstance(sig.tasks, group) else sig.tasks + assertion_result = True + for task in tasks: + links = task.options["link"] + links.extend(task.options["link_error"]) + for link in links: + assertion_result = all( + [ + assertion_result, + all( + [ + stamped_header in link["options"] + for stamped_header in link["options"]["stamped_headers"] + ] + ), + ] + ) + else: + if not assertion_result: + failed_task_link = link + break + + assertion_result = all( + [ + assertion_result, + task.options["stamped_headers"]["header"] == "value", + all([header in task.options for header in task.options["stamped_headers"]]), + ] + ) + + if not assertion_result: + failed_task = task + break + + return super().on_replace(sig) + + @self.app.task(shared=False, bind=True, base=MyTask) + def replace_from_MyTask(self): + # Allows easy assertion for the test without using Mock + return self.replace(sig_to_replace) + + s1 = chain(signature("foo11"), signature("foo12")) + s1.link(signature("link_foo1")) + s1.link_error(signature("link_error_foo1")) + + s2 = chain(signature("foo21"), signature("foo22")) + s2.link(signature("link_foo2")) + s2.link_error(signature("link_error_foo2")) + + sig_to_replace = group([s1, s2]) + sig = replace_from_MyTask.s() + 
sig.stamp(CustomStampingVisitor())
+        assertion_result = False
+        failed_task = None
+        failed_task_link = None
+        sig.apply()
+
+        err_msg = (
+            f"Task {failed_task} was not stamped correctly"
+            if failed_task
+            else f"Task link {failed_task_link} was not stamped correctly"
+            if failed_task_link
+            else "Assertion failed"
+        )
+        assert assertion_result, err_msg
+
+    def test_group_stamping_one_level(self, subtests):
+        self.app.conf.task_always_eager = True
+        self.app.conf.task_store_eager_result = True
+        self.app.conf.result_extended = True
+
+        sig_1 = self.add.s(2, 2)
+        sig_2 = self.add.s(4, 4)
+        sig_1_res = sig_1.freeze()
+        sig_2_res = sig_2.freeze()
+
+        g = group(sig_1, sig_2, app=self.app)
+        g.stamp(stamp="stamp")
+        g.apply()
+
+        with subtests.test("sig_1_res is stamped manually", stamp=["stamp"]):
+            assert sig_1_res._get_task_meta()["stamp"] == ["stamp"]
+
+        with subtests.test("sig_2_res is stamped manually", stamp=["stamp"]):
+            assert sig_2_res._get_task_meta()["stamp"] == ["stamp"]
+
+        with subtests.test("sig_1_res has stamped_headers", stamped_headers=["stamp"]):
+            assert sorted(sig_1_res._get_task_meta()["stamped_headers"]) == sorted(["stamp"])
+
+        with subtests.test("sig_2_res has stamped_headers", stamped_headers=["stamp"]):
+            assert sorted(sig_2_res._get_task_meta()["stamped_headers"]) == sorted(["stamp"])
+
+    def test_chord_stamping_one_level(self, subtests):
+        """
+        In the case of a group within a chord that comes from another canvas
+        element, ensure that chord stamps are added correctly when chords are
+        run in parallel.
+        """
+        self.app.conf.task_always_eager = True
+        self.app.conf.task_store_eager_result = True
+        self.app.conf.result_extended = True
+
+        sig_1 = self.add.s(2, 2)
+        sig_2 = self.add.s(4, 4)
+        sig_1_res = sig_1.freeze()
+        sig_2_res = sig_2.freeze()
+        sig_sum = self.xsum.s()
+
+        g = chord([sig_1, sig_2], sig_sum, app=self.app)
+        g.stamp(stamp="stamp")
+        g.freeze()
+        g.apply()
+
+        with subtests.test("sig_1_res is stamped manually", stamp=["stamp"]):
+            assert sig_1_res._get_task_meta()["stamp"] == ["stamp"]
+
+        with subtests.test("sig_2_res is stamped manually", stamp=["stamp"]):
+            assert sig_2_res._get_task_meta()["stamp"] == ["stamp"]
+
+        with subtests.test("sig_1_res has stamped_headers", stamped_headers=["stamp"]):
+            assert sorted(sig_1_res._get_task_meta()["stamped_headers"]) == sorted(["stamp"])
+
+        with subtests.test("sig_2_res has stamped_headers", stamped_headers=["stamp"]):
+            assert sorted(sig_2_res._get_task_meta()["stamped_headers"]) == sorted(["stamp"])

From 17dff0c00e5cd72a1fceb8fb7f66d62debdba68a Mon Sep 17 00:00:00 2001
From: charlietruong
Date: Sat, 4 Feb 2023 16:16:36 -0600
Subject: [PATCH 0300/1051] result_backend_thread_safe config shares backend across threads

---
 celery/app/base.py               | 28 +++++++++++++++++++++++-----
 celery/backends/base.py          |  1 +
 docs/userguide/configuration.rst | 12 ++++++++++++
 t/unit/app/test_app.py           | 15 +++++++++++++++
 t/unit/backends/test_base.py     | 12 ++++++++++++
 5 files changed, 63 insertions(+), 5 deletions(-)

diff --git a/celery/app/base.py b/celery/app/base.py
index 8281f5510b9..3f8b2ec0a70 100644
--- a/celery/app/base.py
+++ b/celery/app/base.py
@@ -229,6 +229,7 @@ def __init__(self, main=None, loader=None, backend=None, **kwargs):
         self._local = threading.local()
 
+        self._backend_cache = None
         self.clock = LamportClock()
         self.main = main
@@ -1249,14 +1250,31 @@ def amqp(self):
         """AMQP related functionality: :class:`~@amqp`."""
         return instantiate(self.amqp_cls, app=self)
 
+    @property
+    def _backend(self):
+        """A reference to the 
backend object + + Uses self._backend_cache if it is thread safe. + Otherwise, use self._local + """ + if self._backend_cache is not None: + return self._backend_cache + return getattr(self._local, "backend", None) + + @_backend.setter + def _backend(self, backend): + """Set the backend object on the app""" + if backend.thread_safe: + self._backend_cache = backend + else: + self._local.backend = backend + @property def backend(self): """Current backend instance.""" - try: - return self._local.backend - except AttributeError: - self._local.backend = new_backend = self._get_backend() - return new_backend + if self._backend is None: + self._backend = self._get_backend() + return self._backend @property def conf(self): diff --git a/celery/backends/base.py b/celery/backends/base.py index 22710cb3c56..8a391cf0baa 100644 --- a/celery/backends/base.py +++ b/celery/backends/base.py @@ -131,6 +131,7 @@ def __init__(self, app, self.max_sleep_between_retries_ms = conf.get('result_backend_max_sleep_between_retries_ms', 10000) self.base_sleep_between_retries_ms = conf.get('result_backend_base_sleep_between_retries_ms', 10) self.max_retries = conf.get('result_backend_max_retries', float("inf")) + self.thread_safe = conf.get('result_backend_thread_safe', False) self._pending_results = pending_results_t({}, WeakValueDictionary()) self._pending_messages = BufferMap(MESSAGE_BUFFER_MAX) diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst index 17f3cb2757b..a43681b10aa 100644 --- a/docs/userguide/configuration.rst +++ b/docs/userguide/configuration.rst @@ -792,6 +792,18 @@ Default: Inf This is the maximum of retries in case of recoverable exceptions. +.. setting:: result_backend_thread_safe + +``result_backend_thread_safe`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Default: False + +If True, then the backend object is shared across threads. +This may be useful for using a shared connection pool instead of creating +a connection for every thread. + + .. 
setting:: result_backend_transport_options

``result_backend_transport_options``

diff --git a/t/unit/app/test_app.py b/t/unit/app/test_app.py
index 9d504f9fcc4..664a0ea6b7c 100644
--- a/t/unit/app/test_app.py
+++ b/t/unit/app/test_app.py
@@ -1052,6 +1052,21 @@ def test_thread_oid_is_local(self):
             uuid.UUID(thread_oid)
         assert main_oid != thread_oid
 
+    def test_thread_backend_thread_safe(self):
+        # Should share the backend object across threads
+        from concurrent.futures import ThreadPoolExecutor
+
+        with self.Celery() as app:
+            app.conf.update(result_backend_thread_safe=True)
+            main_backend = app.backend
+            with ThreadPoolExecutor(max_workers=1) as executor:
+                future = executor.submit(lambda: app.backend)
+
+            thread_backend = future.result()
+            assert isinstance(main_backend, Backend)
+            assert isinstance(thread_backend, Backend)
+            assert main_backend is thread_backend
+
 
 class test_defaults:
 
diff --git a/t/unit/backends/test_base.py b/t/unit/backends/test_base.py
index d520a5d3608..981305c72f4 100644
--- a/t/unit/backends/test_base.py
+++ b/t/unit/backends/test_base.py
@@ -1219,3 +1219,15 @@ def test_store_result_reaching_max_retries(self):
         finally:
             self.app.conf.result_backend_always_retry = prev
             self.app.conf.result_backend_max_retries = prev_max_retries
+
+    def test_result_backend_thread_safe(self):
+        # Should identify the backend as thread safe
+        self.app.conf.result_backend_thread_safe = True
+        b = BaseBackend(app=self.app)
+        assert b.thread_safe is True
+
+    def test_result_backend_not_thread_safe(self):
+        # Should identify the backend as not being thread safe
+        self.app.conf.result_backend_thread_safe = False
+        b = BaseBackend(app=self.app)
+        assert b.thread_safe is False

From b489810fecacfde6e9beaa6ad79d48c5e924c443 Mon Sep 17 00:00:00 2001
From: Seth Wang
Date: Mon, 6 Feb 2023 08:51:57 +0800
Subject: [PATCH 0301/1051] Fix crontab schedules that use day_of_month and a negative UTC timezone

Before this patch, cross-day schedules jumped into the future
and some tasks were skipped
---
 celery/schedules.py          |  9 ++++-----
 celery/utils/time.py         |  5 +++--
 t/unit/app/test_schedules.py | 39 ++++++++++++++++++++++++++++++++++++
 3 files changed, 46 insertions(+), 7 deletions(-)

diff --git a/celery/schedules.py b/celery/schedules.py
index 9798579754f..89fb5a3c890 100644
--- a/celery/schedules.py
+++ b/celery/schedules.py
@@ -72,8 +72,8 @@ def remaining_estimate(self, last_run_at):
     def is_due(self, last_run_at):
         raise NotImplementedError()
 
-    def maybe_make_aware(self, dt):
-        return maybe_make_aware(dt, self.tz)
+    def maybe_make_aware(self, dt, naive_as_utc=True):
+        return maybe_make_aware(dt, self.tz, naive_as_utc=naive_as_utc)
 
     @property
     def app(self):
@@ -468,9 +468,8 @@ def day_out_of_range(year, month, day):
             return False
 
         def is_before_last_run(year, month, day):
-            return self.maybe_make_aware(datetime(year,
-                                                  month,
-                                                  day)) < last_run_at
+            return self.maybe_make_aware(datetime(year, month, day, next_hour, next_minute),
+                                         naive_as_utc=False) < last_run_at
 
         def roll_over():
             for _ in range(2000):
diff --git a/celery/utils/time.py b/celery/utils/time.py
index 6b5813ebdf8..ed4008c6e48 100644
--- a/celery/utils/time.py
+++ b/celery/utils/time.py
@@ -305,10 +305,11 @@ def to_utc(dt):
     return make_aware(dt, timezone.utc)
 
 
-def maybe_make_aware(dt, tz=None):
+def maybe_make_aware(dt, tz=None, naive_as_utc=True):
    """Convert dt to aware datetime, do nothing if dt is already aware."""
     if is_naive(dt):
-        dt = to_utc(dt)
+        if naive_as_utc:
+            dt = to_utc(dt)
     return localize(
         dt, timezone.utc if tz is None else 
timezone.tz_or_local(tz), ) diff --git a/t/unit/app/test_schedules.py b/t/unit/app/test_schedules.py index d6f555c2cf2..4fc91113dfa 100644 --- a/t/unit/app/test_schedules.py +++ b/t/unit/app/test_schedules.py @@ -472,6 +472,26 @@ def test_day_after_dst_start(self): assert next.utcoffset().seconds == 7200 assert next == tz.localize(datetime(2017, 3, 26, 9, 0)) + def test_negative_utc_timezone_with_day_of_month(self): + # UTC-8 + tzname = "America/Los_Angeles" + self.app.timezone = tzname + tz = pytz.timezone(tzname) + + # set day_of_month to test on _delta_to_next + crontab = self.crontab(minute=0, day_of_month='27-31') + + # last_run_at: '2023/01/28T23:00:00-08:00' + last_run_at = tz.localize(datetime(2023, 1, 28, 23, 0)) + + # now: '2023/01/29T00:00:00-08:00' + now = tz.localize(datetime(2023, 1, 29, 0, 0)) + + crontab.nowfun = lambda: now + next = now + crontab.remaining_estimate(last_run_at) + + assert next == tz.localize(datetime(2023, 1, 29, 0, 0)) + class test_crontab_is_due: @@ -933,3 +953,22 @@ def test_execution_not_due_if_last_run_past_last_feasible_time(self): due, remaining = self.daily.is_due(last_run) assert remaining == expected_remaining assert not due + + def test_execution_due_for_negative_utc_timezone_with_day_of_month(self): + # UTC-8 + tzname = "America/Los_Angeles" + self.app.timezone = tzname + tz = pytz.timezone(tzname) + + # set day_of_month to test on _delta_to_next + crontab = self.crontab(minute=0, day_of_month='27-31') + + # last_run_at: '2023/01/28T23:00:00-08:00' + last_run_at = tz.localize(datetime(2023, 1, 28, 23, 0)) + + # now: '2023/01/29T00:00:00-08:00' + now = tz.localize(datetime(2023, 1, 29, 0, 0)) + + with patch_crontab_nowfun(crontab, now): + due, remaining = crontab.is_due(last_run_at) + assert (due, remaining) == (True, 3600) From c8e10e51a3348d1a9d8eaf58f656333549f04424 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 9 Feb 2023 16:25:10 +0200 Subject: [PATCH 0302/1051] Stamping Mechanism Examples Refactoring (#8060) * renamed: examples/stamping/shell.py -> examples/stamping/revoke_example.py * Added examples/stamping/nested_replace_example.py * Fixed revoke_example.py * Cleanup --- examples/stamping/myapp.py | 22 ++++---- examples/stamping/nested_replace_example.py | 9 ++++ .../stamping/{shell.py => revoke_example.py} | 4 +- examples/stamping/tasks.py | 30 +++++++++-- examples/stamping/visitors.py | 50 +++++++++++++++++-- 5 files changed, 91 insertions(+), 24 deletions(-) create mode 100644 examples/stamping/nested_replace_example.py rename examples/stamping/{shell.py => revoke_example.py} (94%) diff --git a/examples/stamping/myapp.py b/examples/stamping/myapp.py index 833939f7359..df317ce00a5 100644 --- a/examples/stamping/myapp.py +++ b/examples/stamping/myapp.py @@ -37,16 +37,14 @@ @task_received.connect -def task_received_handler( - sender=None, - request=None, - signal=None, - **kwargs -): - print(f'In {signal.name} for: {repr(request)}') - print(f'Found stamps: {request.stamped_headers}') - print(json.dumps(request.stamps, indent=4, sort_keys=True)) - - -if __name__ == '__main__': +def task_received_handler(sender=None, request=None, signal=None, **kwargs): + print(f"In {signal.name} for: {repr(request)}") + if hasattr(request, "stamped_headers") and request.stamped_headers: + print(f"Found stamps: {request.stamped_headers}") + print(json.dumps(request.stamps, indent=4, sort_keys=True)) + else: + print("No stamps found") + + +if __name__ == "__main__": app.start() diff --git a/examples/stamping/nested_replace_example.py 
b/examples/stamping/nested_replace_example.py new file mode 100644 index 00000000000..7cbec9a33e2 --- /dev/null +++ b/examples/stamping/nested_replace_example.py @@ -0,0 +1,9 @@ +from tasks import identity, identity_task + +from celery import group + + +def run_example(): + canvas = identity.s("task") + canvas.link(identity_task.s() | group(identity_task.s(), identity_task.s())) + canvas.delay() diff --git a/examples/stamping/shell.py b/examples/stamping/revoke_example.py similarity index 94% rename from examples/stamping/shell.py rename to examples/stamping/revoke_example.py index 3d2b48bb1a3..728131b76ef 100644 --- a/examples/stamping/shell.py +++ b/examples/stamping/revoke_example.py @@ -1,6 +1,6 @@ from time import sleep -from tasks import identity, mul, wait_for_revoke, xsum +from tasks import identity_task, mul, wait_for_revoke, xsum from visitors import MonitoringIdStampingVisitor from celery.canvas import Signature, chain, chord, group @@ -12,7 +12,7 @@ def create_canvas(n: int) -> Signature: For example, if n = 3, the result is 3 * (1 + 2 + 3) * 10 = 180 """ canvas = chain( - group(identity.s(i) for i in range(1, n+1)) | xsum.s(), + group(identity_task.s(i) for i in range(1, n+1)) | xsum.s(), chord(group(mul.s(10) for _ in range(1, n+1)), xsum.s()), ) diff --git a/examples/stamping/tasks.py b/examples/stamping/tasks.py index 0cb3e113809..bdc2c20fd4d 100644 --- a/examples/stamping/tasks.py +++ b/examples/stamping/tasks.py @@ -1,25 +1,45 @@ from time import sleep from config import app +from visitors import FullVisitor, MonitoringIdStampingVisitor, MyStampingVisitor from celery import Task -from examples.stamping.visitors import MyStampingVisitor +from celery.canvas import Signature class MyTask(Task): """Custom task for stamping on replace""" - def on_replace(self, sig): + def on_replace(self, sig: Signature): sig.stamp(MyStampingVisitor()) return super().on_replace(sig) @app.task -def identity(x): +def identity_task(x): """Identity function""" + # When used from identity(), this task will be stamped with: + # - FullVisitor: Stamps per canvas primitive: + # e.g: on_signature: { + # "on_signature": "FullVisitor.on_signature()", + # } + # - MyStampingVisitor: {"mystamp": "I am a stamp!"} + # - MonitoringIdStampingVisitor: {"monitoring_id": str(uuid4())} return x +@app.task(bind=True) +def replaced_identity(self: Task, x): + # Adds stamps to identity_task from: MonitoringIdStampingVisitor + return self.replace(identity_task.s(x), visitor=MonitoringIdStampingVisitor()) + + +@app.task(bind=True, base=MyTask) +def identity(self: Task, x): + # Adds stamps to replaced_identity from: FullVisitor and MyStampingVisitor + return self.replace(replaced_identity.s(x), visitor=FullVisitor()) + + @app.task def mul(x: int, y: int) -> int: """Multiply two numbers""" @@ -35,10 +55,10 @@ def xsum(numbers: list) -> int: @app.task def waitfor(seconds: int) -> None: """Wait for "seconds" seconds, ticking every second.""" - print(f'Waiting for {seconds} seconds...') + print(f"Waiting for {seconds} seconds...") for i in range(seconds): sleep(1) - print(f'{i+1} seconds passed') + print(f"{i+1} seconds passed") @app.task(bind=True, base=MyTask) diff --git a/examples/stamping/visitors.py b/examples/stamping/visitors.py index 0b7e462014f..4f297bec257 100644 --- a/examples/stamping/visitors.py +++ b/examples/stamping/visitors.py @@ -1,14 +1,54 @@ from uuid import uuid4 -from celery.canvas import StampingVisitor +from celery.canvas import Signature, StampingVisitor class MyStampingVisitor(StampingVisitor): - def 
on_signature(self, sig, **headers) -> dict: - return {'mystamp': 'I am a stamp!'} + def on_signature(self, sig: Signature, **headers) -> dict: + return {"mystamp": "I am a stamp!"} class MonitoringIdStampingVisitor(StampingVisitor): + def on_signature(self, sig: Signature, **headers) -> dict: + return {"monitoring_id": str(uuid4())} - def on_signature(self, sig, **headers) -> dict: - return {'monitoring_id': str(uuid4())} + +class FullVisitor(StampingVisitor): + def on_signature(self, sig: Signature, **headers) -> dict: + return { + "on_signature": "FullVisitor.on_signature()", + } + + def on_callback(self, sig, **headers) -> dict: + return { + "on_callback": "FullVisitor.on_callback()", + } + + def on_errback(self, sig, **headers) -> dict: + return { + "on_errback": "FullVisitor.on_errback()", + } + + def on_chain_start(self, sig: Signature, **headers) -> dict: + return { + "on_chain_start": "FullVisitor.on_chain_start()", + } + + def on_group_start(self, sig: Signature, **headers) -> dict: + return { + "on_group_start": "FullVisitor.on_group_start()", + } + + def on_chord_header_start(self, sig: Signature, **headers) -> dict: + s = super().on_chord_header_start(sig, **headers) + s.update( + { + "on_chord_header_start": "FullVisitor.on_chord_header_start()", + } + ) + return s + + def on_chord_body(self, sig: Signature, **headers) -> dict: + return { + "on_chord_body": "FullVisitor.on_chord_body()", + } From 6d13ec8f6033bc95bb9dbfeb1e3460394c59944d Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 9 Feb 2023 17:46:05 +0200 Subject: [PATCH 0303/1051] Fixed bug in Task.on_stamp_replaced() (#8061) * Added unit test: test_on_stamp_replaced() * Fixed bug in Task.on_stamp_replaced() --- celery/app/task.py | 9 +++- t/unit/tasks/test_stamping.py | 80 ++++++++++++++++++++++++++++++++++- 2 files changed, 86 insertions(+), 3 deletions(-) diff --git a/celery/app/task.py b/celery/app/task.py index d77952f6674..c07dcfb9975 100644 --- a/celery/app/task.py +++ b/celery/app/task.py @@ -1086,7 +1086,7 @@ def on_stamp_replaced(self, sig, visitor=None): sig (Signature): signature to replace with. visitor (StampingVisitor): Visitor API object. """ - stamps = {} + headers = {} # If the original task had stamps if self.request.stamps: @@ -1100,9 +1100,14 @@ def on_stamp_replaced(self, sig, visitor=None): # with stamping a single header stamp to always be a flattened stamp = stamp[0] if len(stamp) == 1 else stamp stamps[header] = stamp + stamped_headers = self.request.stamped_headers + headers.update(stamps) + headers["stamped_headers"] = stamped_headers if visitor: # This check avoids infinite recursion when the visitor is None - sig.stamp(visitor=visitor, **stamps) + sig.stamp(visitor=visitor, **headers) + elif headers: + sig.stamp(**headers) def on_replace(self, sig): """Handler called when the task is replaced. 
diff --git a/t/unit/tasks/test_stamping.py b/t/unit/tasks/test_stamping.py index 02f4d54ba28..5931174dfa4 100644 --- a/t/unit/tasks/test_stamping.py +++ b/t/unit/tasks/test_stamping.py @@ -609,7 +609,7 @@ def test_stamping_with_replace(self, workflow: Signature, stamping_visitor: Stam class AssertionTask(Task): def on_stamp_replaced(self, sig: Signature, visitor=None): - return super().on_stamp_replaced(sig, visitor=stamping_visitor) + super().on_stamp_replaced(sig, visitor=stamping_visitor) def on_replace(self, sig: Signature): nonlocal assertion_result @@ -1227,3 +1227,81 @@ def test_chord_stamping_one_level(self, subtests): with subtests.test("sig_2_res has stamped_headers", stamped_headers=["stamp"]): assert sorted(sig_2_res._get_task_meta()["stamped_headers"]) == sorted(["stamp"]) + + @pytest.mark.usefixtures("depends_on_current_app") + def test_on_stamp_replaced_with_visitor(self): + self.app.conf.task_always_eager = True + self.app.conf.task_store_eager_result = True + self.app.conf.result_extended = True + + class CustomStampingVisitor(StampingVisitor): + def on_signature(self, sig, **headers) -> dict: + return {"header": "value"} + + class CustomStampingVisitor2(StampingVisitor): + def on_signature(self, sig, **headers) -> dict: + return {"header2": "value2"} + + mytask = self.app.task(shared=False)(return_True) + + class AssertionTask(Task): + def on_stamp_replaced(self, sig: Signature, visitor=None): + assert "stamped_headers" not in sig.options + assert "header" not in sig.options + assert "header2" not in sig.options + # Here we make sure sig received the stamps from stamp_using_replace and assert_using_replace + # using the replace via on_stamp_replaced() + super().on_stamp_replaced(sig, visitor=visitor) + assert sorted(sig.options["stamped_headers"]) == sorted(["header", "header2"]) + assert sig.options["header"] == "value" + assert sig.options["header2"] == "value2" + + @self.app.task(shared=False, bind=True, base=AssertionTask) + def assert_using_replace(self: AssertionTask): + assert self.request.stamped_headers == ["header"] + assert self.request.stamps["header"] == ["value"] + return self.replace(mytask.s(), visitor=CustomStampingVisitor2()) + + @self.app.task(shared=False, bind=True) + def stamp_using_replace(self: Task): + assert self.request.stamped_headers is None, "stamped_headers should not be set" + assert self.request.stamps is None, "stamps should not be set" + return self.replace(assert_using_replace.s(), visitor=CustomStampingVisitor()) + + replaced_sig = group(stamp_using_replace.s(), self.add.s(1, 1)) | self.add.s(2, 2) + replaced_sig.apply() + + @pytest.mark.usefixtures("depends_on_current_app") + def test_on_stamp_replaced_without_visitor(self): + self.app.conf.task_always_eager = True + self.app.conf.task_store_eager_result = True + self.app.conf.result_extended = True + + class CustomStampingVisitor(StampingVisitor): + def on_signature(self, sig, **headers) -> dict: + return {"header": "value"} + + mytask = self.app.task(shared=False)(return_True) + + class AssertionTask(Task): + def on_stamp_replaced(self, sig: Signature, visitor=None): + assert "stamped_headers" not in sig.options + assert "header" not in sig.options + super().on_stamp_replaced(sig, visitor=visitor) + assert sig.options["stamped_headers"] == ["header"] + assert sig.options["header"] == "value" + + @self.app.task(shared=False, bind=True, base=AssertionTask) + def assert_using_replace(self: AssertionTask): + assert self.request.stamped_headers == ["header"] + assert 
self.request.stamps["header"] == ["value"] + return self.replace(mytask.s(), visitor=None) + + @self.app.task(shared=False, bind=True) + def stamp_using_replace(self: Task): + assert self.request.stamped_headers is None, "stamped_headers should not be set" + assert self.request.stamps is None, "stamps should not be set" + return self.replace(assert_using_replace.s(), visitor=CustomStampingVisitor()) + + replaced_sig = group(stamp_using_replace.s(), self.add.s(1, 1)) | self.add.s(2, 2) + replaced_sig.apply() From 0907e868eb7cc0ae4b3219c9b09e73ac5b399907 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 13 Feb 2023 17:26:16 +0000 Subject: [PATCH 0304/1051] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/mirrors-mypy: v0.991 → v1.0.0](https://github.com/pre-commit/mirrors-mypy/compare/v0.991...v1.0.0) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 89da9e05051..6fb81019a8a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -29,7 +29,7 @@ repos: - id: isort - repo: https://github.com/pre-commit/mirrors-mypy - rev: v0.991 + rev: v1.0.0 hooks: - id: mypy pass_filenames: false From 082a1a8c19f430e0f7d8a4271a04f56d29b0af73 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 15 Feb 2023 17:58:55 +0200 Subject: [PATCH 0305/1051] Stamping Mechanism Refactoring 2 (#8064) * Removed stamping from Task.replace() - previous impl was based on wrong assumptions of what is Task.replace() * Removed special stamping sync developed for Task.replace() workflow - previous impl was based on wrong assumptions of what is Task.replace() * Added "append_stamps" argument to stamp() to decide what to do with duplicated stamps * Refactored self_headers() * Refactored examples/stamping and added new examples * Refactored stamp_links() for better code readability * Cleanup * Doc fixes --- celery/app/task.py | 42 +----- celery/canvas.py | 153 ++++++++++++-------- examples/stamping/examples.py | 46 ++++++ examples/stamping/myapp.py | 5 +- examples/stamping/nested_replace_example.py | 9 -- examples/stamping/tasks.py | 78 +++++++--- examples/stamping/visitors.py | 15 +- t/unit/tasks/test_stamping.py | 104 ++----------- 8 files changed, 221 insertions(+), 231 deletions(-) create mode 100644 examples/stamping/examples.py delete mode 100644 examples/stamping/nested_replace_example.py diff --git a/celery/app/task.py b/celery/app/task.py index c07dcfb9975..5a12c6df004 100644 --- a/celery/app/task.py +++ b/celery/app/task.py @@ -896,7 +896,7 @@ def send_event(self, type_, retry=True, retry_policy=None, **fields): type_, uuid=req.id, retry=retry, retry_policy=retry_policy, **fields) - def replace(self, sig, visitor=None): + def replace(self, sig): """Replace this task, with a new task inheriting the task id. Execution of the host task ends immediately and no subsequent statements @@ -904,11 +904,6 @@ def replace(self, sig, visitor=None): .. versionadded:: 4.0 - .. versionchanged:: 5.3 - Added new ``visitor`` argument, which is used when the task is - replaced to stamp the replaced task with the visitor's stamps. - In addition, any previous stamps will be passed to the replaced task. - Arguments: sig (Signature): signature to replace with. visitor (StampingVisitor): Visitor API object. 
@@ -958,8 +953,6 @@ def replace(self, sig, visitor=None): # retain their original task IDs as well for t in reversed(self.request.chain or []): sig |= signature(t, app=self.app) - # Stamping sig with parents groups - self.on_stamp_replaced(sig, visitor) return self.on_replace(sig) def add_to_chord(self, sig, lazy=False): @@ -1076,39 +1069,6 @@ def after_return(self, status, retval, task_id, args, kwargs, einfo): None: The return value of this handler is ignored. """ - def on_stamp_replaced(self, sig, visitor=None): - """Handler called when the task is replaced and passes - the stamps from the original task to the replaced task. - - .. versionadded:: 5.3 - - Arguments: - sig (Signature): signature to replace with. - visitor (StampingVisitor): Visitor API object. - """ - headers = {} - - # If the original task had stamps - if self.request.stamps: - # Copy the stamps to the new signature - stamps = self.request.stamps.copy() - for header, stamp in stamps.items(): - # The request will contain single stamps as a list of one element so we need to unpack them to - # keep consistency with stamping with a header of a single stamp (which will not be a list - # implicitly like in the request) - # This will also flat stamps that were originally a list of a single stamp to create consistency - # with stamping a single header stamp to always be a flattened - stamp = stamp[0] if len(stamp) == 1 else stamp - stamps[header] = stamp - stamped_headers = self.request.stamped_headers - headers.update(stamps) - headers["stamped_headers"] = stamped_headers - - if visitor: # This check avoids infinite recursion when the visitor is None - sig.stamp(visitor=visitor, **headers) - elif headers: - sig.stamp(**headers) - def on_replace(self, sig): """Handler called when the task is replaced. diff --git a/celery/canvas.py b/celery/canvas.py index 900a46b8518..faa70f4ecd6 100644 --- a/celery/canvas.py +++ b/celery/canvas.py @@ -61,12 +61,12 @@ def task_name_from(task): return getattr(task, 'name', task) -def _stamp_regen_task(task, visitor, **headers): +def _stamp_regen_task(task, visitor, append_stamps, **headers): """When stamping a sequence of tasks created by a generator, we use this function to stamp each task in the generator without exhausting it.""" - task.stamp(visitor=visitor, **headers) + task.stamp(visitor, append_stamps, **headers) return task @@ -550,39 +550,26 @@ def set(self, immutable=None, **options): def set_immutable(self, immutable): self.immutable = immutable - def stamp(self, visitor=None, **headers): - """Stamp this signature with additional custom headers. - Using a visitor will pass on responsibility for the stamping - to the visitor. - - .. versionadded:: 5.3 - - Arguments: - visitor (StampingVisitor): Visitor API object. - headers (Dict): Stamps that should be added to headers. - """ - self.stamp_links(visitor, **headers) - headers = headers.copy() - visitor_headers = None - if visitor is not None: - visitor_headers = visitor.on_signature(self, **headers) or {} - headers = self._stamp_headers(visitor_headers, **headers) - return self.set(**headers) - - def _stamp_headers(self, visitor_headers=None, **headers): - """ Collect all stamps from visitor, headers and self, + def _stamp_headers(self, visitor_headers=None, append_stamps=True, self_headers=True, **headers): + """Collect all stamps from visitor, headers and self, and return an idempotent dictionary of stamps. .. versionadded:: 5.3 Arguments: visitor_headers (Dict): Stamps from a visitor method. 
+ append_stamps (bool): + If True, duplicated stamps will be appended to a list. + If False, duplicated stamps will be replaced by the last stamp. + self_headers (bool): + If True, stamps from self.options will be added. + If False, stamps from self.options will be ignored. headers (Dict): Stamps that should be added to headers. Returns: Dict: Merged stamps. """ - # Use aggregate_duplicates=False to prioritize visitor_headers over headers in case of duplicated stamps. + # Use append_stamps=True to prioritize visitor_headers over headers in case of duplicated stamps. # This will lose duplicated headers from the headers argument, but that is the best effort solution # to avoid implicitly casting the duplicated stamp into a list of both stamps from headers and # visitor_headers of the same key. @@ -593,47 +580,77 @@ def _stamp_headers(self, visitor_headers=None, **headers): # headers["foo"] == ["bar1", "bar2"] -> The stamp is now a list # _merge_dictionaries(headers, visitor_headers, aggregate_duplicates=False) # headers["foo"] == "bar2" -> "bar1" is lost, but the stamp is according to the visitor - aggregate_duplicates = False headers = headers.copy() + + if "stamped_headers" not in headers: + headers["stamped_headers"] = list(headers.keys()) + # Merge headers with visitor headers if visitor_headers is not None: visitor_headers = visitor_headers or {} if "stamped_headers" not in visitor_headers: visitor_headers["stamped_headers"] = list(visitor_headers.keys()) - # Prioritize visitor_headers over headers - _merge_dictionaries(headers, visitor_headers, aggregate_duplicates=aggregate_duplicates) + + # Sync from visitor + _merge_dictionaries(headers, visitor_headers, aggregate_duplicates=append_stamps) headers["stamped_headers"] = list(set(headers["stamped_headers"])) + # Merge headers with self.options - else: - headers["stamped_headers"] = [ - header for header in headers.keys() - if header not in self.options and header != "stamped_headers" - ] - - # Prioritize self.options over headers - _merge_dictionaries(headers, self.options, aggregate_duplicates=aggregate_duplicates) - - # Sync missing stamps from self.options (relevant for stamping during task replacement) - stamped_headers = set(headers.get("stamped_headers", [])) - stamped_headers.update(self.options.get("stamped_headers", [])) - headers["stamped_headers"] = list(stamped_headers) - for previous_header in stamped_headers: - if previous_header not in headers and previous_header in self.options: - headers[previous_header] = self.options[previous_header] + if self_headers: + stamped_headers = set(headers.get("stamped_headers", [])) + stamped_headers.update(self.options.get("stamped_headers", [])) + headers["stamped_headers"] = list(stamped_headers) + # Only merge stamps that are in stamped_headers from self.options + redacted_options = {k: v for k, v in self.options.items() if k in headers["stamped_headers"]} + + # Sync from self.options + _merge_dictionaries(headers, redacted_options, aggregate_duplicates=append_stamps) + headers["stamped_headers"] = list(set(headers["stamped_headers"])) + return headers - def stamp_links(self, visitor, **headers): + def stamp(self, visitor=None, append_stamps=True, **headers): + """Stamp this signature with additional custom headers. + Using a visitor will pass on responsibility for the stamping + to the visitor. + + .. versionadded:: 5.3 + + Arguments: + visitor (StampingVisitor): Visitor API object. + append_stamps (bool): + If True, duplicated stamps will be appended to a list. 
+ If False, duplicated stamps will be replaced by the last stamp. + headers (Dict): Stamps that should be added to headers. + """ + self.stamp_links(visitor, append_stamps, **headers) + headers = headers.copy() + visitor_headers = None + if visitor is not None: + visitor_headers = visitor.on_signature(self, **headers) or {} + headers = self._stamp_headers(visitor_headers, append_stamps, **headers) + return self.set(**headers) + + def stamp_links(self, visitor, append_stamps=True, **headers): """Stamp this signature links (callbacks and errbacks). Using a visitor will pass on responsibility for the stamping to the visitor. Arguments: visitor (StampingVisitor): Visitor API object. + append_stamps (bool): + If True, duplicated stamps will be appended to a list. + If False, duplicated stamps will be replaced by the last stamp. headers (Dict): Stamps that should be added to headers. """ non_visitor_headers = headers.copy() + # When we are stamping links, we want to avoid adding stamps from the linked signature itself + # so we turn off self_headers to stamp the link only with the visitor and the headers. + # If it's enabled, the link copies the stamps of the linked signature, and we don't want that. + self_headers = False + # Stamp all of the callbacks of this signature headers = deepcopy(non_visitor_headers) for link in self.options.get('link', []) or []: @@ -641,8 +658,13 @@ def stamp_links(self, visitor, **headers): visitor_headers = None if visitor is not None: visitor_headers = visitor.on_callback(link, **headers) or {} - headers = self._stamp_headers(visitor_headers, **headers) - link.stamp(visitor=visitor, **headers) + headers = self._stamp_headers( + visitor_headers=visitor_headers, + append_stamps=append_stamps, + self_headers=self_headers, + **headers + ) + link.stamp(visitor, append_stamps, **headers) # Stamp all of the errbacks of this signature headers = deepcopy(non_visitor_headers) @@ -651,8 +673,13 @@ def stamp_links(self, visitor, **headers): visitor_headers = None if visitor is not None: visitor_headers = visitor.on_errback(link, **headers) or {} - headers = self._stamp_headers(visitor_headers, **headers) - link.stamp(visitor=visitor, **headers) + headers = self._stamp_headers( + visitor_headers=visitor_headers, + append_stamps=append_stamps, + self_headers=self_headers, + **headers + ) + link.stamp(visitor, append_stamps, **headers) def _with_list_option(self, key): """Gets the value at the given self.options[key] as a list. 
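The effect of the new ``append_stamps`` flag is easiest to see on a plain signature. The sketch below assumes ``add`` is any registered task; the expected values follow directly from the docstrings above.

.. code-block:: python

    sig = add.s(2, 2)
    sig.stamp(stamp='first')
    sig.stamp(stamp='second', append_stamps=True)
    # The duplicated header is appended to a list:
    # sig.options['stamp'] == ['first', 'second']

    sig = add.s(2, 2)
    sig.stamp(stamp='first')
    sig.stamp(stamp='second', append_stamps=False)
    # The duplicated header is replaced by the last stamp:
    # sig.options['stamp'] == 'second'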
@@ -1067,15 +1094,15 @@ def freeze(self, _id=None, group_id=None, chord=None, ) return results[0] - def stamp(self, visitor=None, **headers): + def stamp(self, visitor=None, append_stamps=True, **headers): visitor_headers = None if visitor is not None: visitor_headers = visitor.on_chain_start(self, **headers) or {} - headers = self._stamp_headers(visitor_headers, **headers) + headers = self._stamp_headers(visitor_headers, append_stamps, **headers) self.stamp_links(visitor, **headers) for task in self.tasks: - task.stamp(visitor=visitor, **headers) + task.stamp(visitor, append_stamps, **headers) if visitor is not None: visitor.on_chain_end(self, **headers) @@ -1603,20 +1630,20 @@ def set_immutable(self, immutable): for task in self.tasks: task.set_immutable(immutable) - def stamp(self, visitor=None, **headers): + def stamp(self, visitor=None, append_stamps=True, **headers): visitor_headers = None if visitor is not None: visitor_headers = visitor.on_group_start(self, **headers) or {} - headers = self._stamp_headers(visitor_headers, **headers) - self.stamp_links(visitor, **headers) + headers = self._stamp_headers(visitor_headers, append_stamps, **headers) + self.stamp_links(visitor, append_stamps, **headers) if isinstance(self.tasks, _regen): - self.tasks.map(_partial(_stamp_regen_task, visitor=visitor, **headers)) + self.tasks.map(_partial(_stamp_regen_task, visitor=visitor, append_stamps=append_stamps, **headers)) else: new_tasks = [] for task in self.tasks: task = maybe_signature(task, app=self.app) - task.stamp(visitor=visitor, **headers) + task.stamp(visitor, append_stamps, **headers) new_tasks.append(task) if isinstance(self.tasks, MutableSequence): self.tasks[:] = new_tasks @@ -2060,7 +2087,7 @@ def freeze(self, _id=None, group_id=None, chord=None, return body_result - def stamp(self, visitor=None, **headers): + def stamp(self, visitor=None, append_stamps=True, **headers): tasks = self.tasks if isinstance(tasks, group): tasks = tasks.tasks @@ -2068,23 +2095,23 @@ def stamp(self, visitor=None, **headers): visitor_headers = None if visitor is not None: visitor_headers = visitor.on_chord_header_start(self, **headers) or {} - headers = self._stamp_headers(visitor_headers, **headers) - self.stamp_links(visitor, **headers) + headers = self._stamp_headers(visitor_headers, append_stamps, **headers) + self.stamp_links(visitor, append_stamps, **headers) if isinstance(tasks, _regen): - tasks.map(_partial(_stamp_regen_task, visitor=visitor, **headers)) + tasks.map(_partial(_stamp_regen_task, visitor=visitor, append_stamps=append_stamps, **headers)) else: stamps = headers.copy() for task in tasks: - task.stamp(visitor=visitor, **stamps) + task.stamp(visitor, append_stamps, **stamps) if visitor is not None: visitor.on_chord_header_end(self, **headers) if visitor is not None and self.body is not None: visitor_headers = visitor.on_chord_body(self, **headers) or {} - headers = self._stamp_headers(visitor_headers, **headers) - self.body.stamp(visitor=visitor, **headers) + headers = self._stamp_headers(visitor_headers, append_stamps, **headers) + self.body.stamp(visitor, append_stamps, **headers) def apply_async(self, args=None, kwargs=None, task_id=None, producer=None, publisher=None, connection=None, diff --git a/examples/stamping/examples.py b/examples/stamping/examples.py new file mode 100644 index 00000000000..f20ca38e86f --- /dev/null +++ b/examples/stamping/examples.py @@ -0,0 +1,46 @@ +from tasks import identity, identity_task +from visitors import FullVisitor, MonitoringIdStampingVisitor + +from 
celery import chain, group + + +def run_example1(): + s1 = chain(identity_task.si("foo11"), identity_task.si("foo12")) + s1.link(identity_task.si("link_foo1")) + s1.link_error(identity_task.si("link_error_foo1")) + + s2 = chain(identity_task.si("foo21"), identity_task.si("foo22")) + s2.link(identity_task.si("link_foo2")) + s2.link_error(identity_task.si("link_error_foo2")) + + canvas = group([s1, s2]) + canvas.stamp(MonitoringIdStampingVisitor()) + canvas.delay() + + +def run_example2(): + sig1 = identity_task.si("sig1") + sig1.link(identity_task.si("sig1_link")) + sig2 = identity_task.si("sig2") + sig2.link(identity_task.si("sig2_link")) + s1 = chain(sig1, sig2) + s1.link(identity_task.si("chain_link")) + s1.stamp(FullVisitor()) + s1.stamp(MonitoringIdStampingVisitor()) + s1.delay() + + +def run_example3(): + sig1 = identity_task.si("sig1") + sig1_link = identity_task.si("sig1_link") + sig1.link(sig1_link) + sig1_link.stamp(FullVisitor()) + sig1_link.stamp(MonitoringIdStampingVisitor()) + sig1.stamp(MonitoringIdStampingVisitor()) + sig1.delay() + + +def run_example_with_replace(): + sig1 = identity.si("sig1") + sig1.link(identity_task.si("sig1_link")) + sig1.delay() diff --git a/examples/stamping/myapp.py b/examples/stamping/myapp.py index df317ce00a5..ee21a0b25ba 100644 --- a/examples/stamping/myapp.py +++ b/examples/stamping/myapp.py @@ -16,14 +16,15 @@ # The shell service is used to run the example. (window2)$ celery -A myapp shell - # Use (copy) the content of shell.py to run the workflow via the + # Use (copy) the content of the examples modules to run the workflow via the + # shell service. - # Use one of two demo runs via the shell service: + # Use one of the demo runs via the shell service: # 1) run_then_revoke(): Run the workflow and revoke the last task # by its stamped header during its run. # 2) revoke_then_run(): Revoke the last task by its stamped header # before its run, then run the workflow. + # 3) Any of the examples in examples.py # # See worker logs for output per defined in task_received_handler(). 
""" diff --git a/examples/stamping/nested_replace_example.py b/examples/stamping/nested_replace_example.py deleted file mode 100644 index 7cbec9a33e2..00000000000 --- a/examples/stamping/nested_replace_example.py +++ /dev/null @@ -1,9 +0,0 @@ -from tasks import identity, identity_task - -from celery import group - - -def run_example(): - canvas = identity.s("task") - canvas.link(identity_task.s() | group(identity_task.s(), identity_task.s())) - canvas.delay() diff --git a/examples/stamping/tasks.py b/examples/stamping/tasks.py index bdc2c20fd4d..abf215dadf4 100644 --- a/examples/stamping/tasks.py +++ b/examples/stamping/tasks.py @@ -4,40 +4,77 @@ from visitors import FullVisitor, MonitoringIdStampingVisitor, MyStampingVisitor from celery import Task -from celery.canvas import Signature +from celery.canvas import Signature, maybe_signature +from celery.utils.log import get_task_logger +logger = get_task_logger(__name__) -class MyTask(Task): + +def log_demo(running_task): + request, name = running_task.request, running_task.name + running_task.request.argsrepr + if hasattr(request, "stamps"): + stamps = request.stamps or {} + stamped_headers = request.stamped_headers or [] + + if stamps and stamped_headers: + logger.critical(f"Found {name}.stamps: {stamps}") + logger.critical(f"Found {name}.stamped_headers: {stamped_headers}") + else: + logger.critical(f"Running {name} without stamps") + + links = request.callbacks or [] + for link in links: + link = maybe_signature(link) + logger.critical(f"Found {name}.link: {link}") + stamped_headers = link.options.get("stamped_headers", []) + stamps = {stamp: link.options[stamp] for stamp in stamped_headers} + + if stamps and stamped_headers: + logger.critical(f"Found {name}.link stamps: {stamps}") + logger.critical(f"Found {name}.link stamped_headers: {stamped_headers}") + else: + logger.critical(f"Running {name}.link without stamps") + + +class StampOnReplace(Task): """Custom task for stamping on replace""" def on_replace(self, sig: Signature): + logger.warning(f"StampOnReplace: {sig}.stamp(FullVisitor())") + sig.stamp(FullVisitor()) + logger.warning(f"StampOnReplace: {sig}.stamp(MyStampingVisitor())") sig.stamp(MyStampingVisitor()) return super().on_replace(sig) -@app.task -def identity_task(x): +class MonitoredTask(Task): + def on_replace(self, sig: Signature): + logger.warning(f"MonitoredTask: {sig}.stamp(MonitoringIdStampingVisitor())") + sig.stamp(MonitoringIdStampingVisitor(), append_stamps=False) + return super().on_replace(sig) + + +@app.task(bind=True) +def identity_task(self, x): """Identity function""" - # When used from identity(), this task will be stamped with: - # - FullVisitor: Stamps per canvas primitive: - # e.g: on_signature: { - # "on_signature": "FullVisitor.on_signature()", - # } - # - MyStampingVisitor: {"mystamp": "I am a stamp!"} - # - MonitoringIdStampingVisitor: {"monitoring_id": str(uuid4())} + log_demo(self) return x -@app.task(bind=True) -def replaced_identity(self: Task, x): - # Adds stamps to identity_task from: MonitoringIdStampingVisitor - return self.replace(identity_task.s(x), visitor=MonitoringIdStampingVisitor()) +@app.task(bind=True, base=MonitoredTask) +def replaced_identity(self: MonitoredTask, x): + log_demo(self) + logger.warning("Stamping identity_task with MonitoringIdStampingVisitor() before replace") + replaced_task = identity_task.s(x) + # These stamps should be overridden by the stamps from MonitoredTask.on_replace() + replaced_task.stamp(MonitoringIdStampingVisitor()) + return 
self.replace(replaced_task) -@app.task(bind=True, base=MyTask) +@app.task(bind=True, base=StampOnReplace) def identity(self: Task, x): - # Adds stamps to replaced_identity from: FullVisitor and MyStampingVisitor - return self.replace(replaced_identity.s(x), visitor=FullVisitor()) + log_demo(self) + return self.replace(replaced_identity.s(x)) @app.task @@ -61,8 +98,7 @@ def waitfor(seconds: int) -> None: print(f"{i+1} seconds passed") -@app.task(bind=True, base=MyTask) -def wait_for_revoke(self: MyTask, seconds: int) -> None: +@app.task(bind=True, base=StampOnReplace) +def wait_for_revoke(self: StampOnReplace, seconds: int) -> None: """Replace this task with a new task that waits for "seconds" seconds.""" - # This will stamp waitfor with MyStampingVisitor self.replace(waitfor.s(seconds)) diff --git a/examples/stamping/visitors.py b/examples/stamping/visitors.py index 4f297bec257..814c88c3ecc 100644 --- a/examples/stamping/visitors.py +++ b/examples/stamping/visitors.py @@ -1,45 +1,57 @@ from uuid import uuid4 from celery.canvas import Signature, StampingVisitor +from celery.utils.log import get_task_logger + +logger = get_task_logger(__name__) class MyStampingVisitor(StampingVisitor): def on_signature(self, sig: Signature, **headers) -> dict: + logger.critical(f"Visitor: Sig '{sig}' is stamped with: mystamp") return {"mystamp": "I am a stamp!"} class MonitoringIdStampingVisitor(StampingVisitor): def on_signature(self, sig: Signature, **headers) -> dict: - return {"monitoring_id": str(uuid4())} + mtask_id = str(uuid4()) + logger.critical(f"Visitor: Sig '{sig}' is stamped with: {mtask_id}") + return {"mtask_id": mtask_id} class FullVisitor(StampingVisitor): def on_signature(self, sig: Signature, **headers) -> dict: + logger.critical(f"Visitor: Sig '{sig}' is stamped with: on_signature") return { "on_signature": "FullVisitor.on_signature()", } def on_callback(self, sig, **headers) -> dict: + logger.critical(f"Visitor: Sig '{sig}' is stamped with: on_callback") return { "on_callback": "FullVisitor.on_callback()", } def on_errback(self, sig, **headers) -> dict: + logger.critical(f"Visitor: Sig '{sig}' is stamped with: on_errback") return { "on_errback": "FullVisitor.on_errback()", } def on_chain_start(self, sig: Signature, **headers) -> dict: + logger.critical(f"Visitor: Sig '{sig}' is stamped with: on_chain_start") return { "on_chain_start": "FullVisitor.on_chain_start()", } def on_group_start(self, sig: Signature, **headers) -> dict: + logger.critical(f"Visitor: Sig '{sig}' is stamped with: on_group_start") return { "on_group_start": "FullVisitor.on_group_start()", } def on_chord_header_start(self, sig: Signature, **headers) -> dict: + logger.critical(f"Visitor: Sig '{sig}' is stamped with: on_chord_header_start") s = super().on_chord_header_start(sig, **headers) s.update( { @@ -49,6 +61,7 @@ def on_chord_header_start(self, sig: Signature, **headers) -> dict: return s def on_chord_body(self, sig: Signature, **headers) -> dict: + logger.critical(f"Visitor: Sig '{sig}' is stamped with: on_chord_body") return { "on_chord_body": "FullVisitor.on_chord_body()", } diff --git a/t/unit/tasks/test_stamping.py b/t/unit/tasks/test_stamping.py index 5931174dfa4..a4cd953f3fb 100644 --- a/t/unit/tasks/test_stamping.py +++ b/t/unit/tasks/test_stamping.py @@ -576,7 +576,7 @@ class test_canvas_stamping(CanvasCase): def stamped_canvas(self, stamping_visitor: StampingVisitor, canvas_workflow: Signature) -> Signature: workflow = canvas_workflow.clone() workflow.stamp(CleanupVisitor()) - 
workflow.stamp(stamping_visitor) + workflow.stamp(stamping_visitor, append_stamps=False) return workflow @pytest.fixture @@ -584,7 +584,7 @@ def stamped_linked_canvas(self, stamping_visitor: StampingVisitor, canvas_workfl workflow = canvas_workflow.clone() workflow.stamp(CleanupVisitor()) workflow.stamp(LinkingVisitor()) - workflow.stamp(stamping_visitor) + workflow.stamp(stamping_visitor, append_stamps=False) return workflow @pytest.fixture(params=["stamped_canvas", "stamped_linked_canvas"]) @@ -603,32 +603,26 @@ def test_stamping_headers_in_options(self, workflow: Signature, stamping_visitor @pytest.mark.usefixtures("depends_on_current_app") def test_stamping_with_replace(self, workflow: Signature, stamping_visitor: StampingVisitor, subtests): - self.app.conf.task_always_eager = True - self.app.conf.task_store_eager_result = True - self.app.conf.result_extended = True - class AssertionTask(Task): - def on_stamp_replaced(self, sig: Signature, visitor=None): - super().on_stamp_replaced(sig, visitor=stamping_visitor) - def on_replace(self, sig: Signature): nonlocal assertion_result - sig.stamp(StampsAssertionVisitor(stamping_visitor, subtests)) - sig.stamp(StampedHeadersAssertionVisitor(stamping_visitor, subtests)) assertion_result = True return super().on_replace(sig) @self.app.task(shared=False, bind=True, base=AssertionTask) def assert_using_replace(self: AssertionTask): - assert self.request.stamped_headers is not None, "stamped_headers should be set" - assert self.request.stamps is not None, "stamps should be set" + assert self.request.stamped_headers is None, "stamped_headers should not pass via replace" + assert self.request.stamps is None, "stamps should not pass via replace" return self.replace(workflow) @self.app.task(shared=False, bind=True) def stamp_using_replace(self: Task): - return self.replace(assert_using_replace.s(), visitor=stamping_visitor) + assert self.request.stamped_headers is not None + assert self.request.stamps is not None + return self.replace(assert_using_replace.s()) replaced_sig = stamp_using_replace.s() + replaced_sig.stamp(stamping_visitor, append_stamps=False) assertion_result = False replaced_sig.apply() assert assertion_result @@ -691,7 +685,7 @@ def test_twice_stamping(self, subtests): with subtests.test("sig_1_res is stamped twice", stamps=["stamp2", "stamp1"]): assert sorted(sig_1_res._get_task_meta()["stamp1"]) == ["stamp1"] - assert sorted(sig_1_res._get_task_meta()["stamp2"]) == ["stamp2"] + assert sorted(sig_1_res._get_task_meta()["stamp2"]) == sorted(["stamp", "stamp2"]) assert sorted(sig_1_res._get_task_meta()["stamp3"]) == ["stamp3"] with subtests.test("sig_1_res is stamped twice", stamped_headers=["stamp2", "stamp1"]): @@ -711,7 +705,7 @@ def test_manual_stamping(self): sig_1.stamp(visitor=None, groups=stamps[0]) sig_1_res = sig_1.freeze() sig_1.apply() - assert sorted(sig_1_res._get_task_meta()["groups"]) == [stamps[0]] + assert sorted(sig_1_res._get_task_meta()["groups"]) == sorted(stamps) def test_custom_stamping_visitor(self, subtests): """ @@ -1227,81 +1221,3 @@ def test_chord_stamping_one_level(self, subtests): with subtests.test("sig_2_res has stamped_headers", stamped_headers=["stamp"]): assert sorted(sig_2_res._get_task_meta()["stamped_headers"]) == sorted(["stamp"]) - - @pytest.mark.usefixtures("depends_on_current_app") - def test_on_stamp_replaced_with_visitor(self): - self.app.conf.task_always_eager = True - self.app.conf.task_store_eager_result = True - self.app.conf.result_extended = True - - class 
CustomStampingVisitor(StampingVisitor): - def on_signature(self, sig, **headers) -> dict: - return {"header": "value"} - - class CustomStampingVisitor2(StampingVisitor): - def on_signature(self, sig, **headers) -> dict: - return {"header2": "value2"} - - mytask = self.app.task(shared=False)(return_True) - - class AssertionTask(Task): - def on_stamp_replaced(self, sig: Signature, visitor=None): - assert "stamped_headers" not in sig.options - assert "header" not in sig.options - assert "header2" not in sig.options - # Here we make sure sig received the stamps from stamp_using_replace and assert_using_replace - # using the replace via on_stamp_replaced() - super().on_stamp_replaced(sig, visitor=visitor) - assert sorted(sig.options["stamped_headers"]) == sorted(["header", "header2"]) - assert sig.options["header"] == "value" - assert sig.options["header2"] == "value2" - - @self.app.task(shared=False, bind=True, base=AssertionTask) - def assert_using_replace(self: AssertionTask): - assert self.request.stamped_headers == ["header"] - assert self.request.stamps["header"] == ["value"] - return self.replace(mytask.s(), visitor=CustomStampingVisitor2()) - - @self.app.task(shared=False, bind=True) - def stamp_using_replace(self: Task): - assert self.request.stamped_headers is None, "stamped_headers should not be set" - assert self.request.stamps is None, "stamps should not be set" - return self.replace(assert_using_replace.s(), visitor=CustomStampingVisitor()) - - replaced_sig = group(stamp_using_replace.s(), self.add.s(1, 1)) | self.add.s(2, 2) - replaced_sig.apply() - - @pytest.mark.usefixtures("depends_on_current_app") - def test_on_stamp_replaced_without_visitor(self): - self.app.conf.task_always_eager = True - self.app.conf.task_store_eager_result = True - self.app.conf.result_extended = True - - class CustomStampingVisitor(StampingVisitor): - def on_signature(self, sig, **headers) -> dict: - return {"header": "value"} - - mytask = self.app.task(shared=False)(return_True) - - class AssertionTask(Task): - def on_stamp_replaced(self, sig: Signature, visitor=None): - assert "stamped_headers" not in sig.options - assert "header" not in sig.options - super().on_stamp_replaced(sig, visitor=visitor) - assert sig.options["stamped_headers"] == ["header"] - assert sig.options["header"] == "value" - - @self.app.task(shared=False, bind=True, base=AssertionTask) - def assert_using_replace(self: AssertionTask): - assert self.request.stamped_headers == ["header"] - assert self.request.stamps["header"] == ["value"] - return self.replace(mytask.s(), visitor=None) - - @self.app.task(shared=False, bind=True) - def stamp_using_replace(self: Task): - assert self.request.stamped_headers is None, "stamped_headers should not be set" - assert self.request.stamps is None, "stamps should not be set" - return self.replace(assert_using_replace.s(), visitor=CustomStampingVisitor()) - - replaced_sig = group(stamp_using_replace.s(), self.add.s(1, 1)) | self.add.s(2, 2) - replaced_sig.apply() From e3cf20856525f587abc0d95ef8d8f07efbad6b3d Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 15 Feb 2023 21:00:51 +0200 Subject: [PATCH 0306/1051] Changed default append_stamps from True to False (meaning duplicates will be flatten based on latter) (#8068) --- celery/canvas.py | 14 +++++++------- examples/stamping/examples.py | 2 +- t/unit/tasks/test_stamping.py | 4 ++-- 3 files changed, 10 insertions(+), 10 deletions(-) diff --git a/celery/canvas.py b/celery/canvas.py index faa70f4ecd6..06fef05d253 100644 --- a/celery/canvas.py 
+++ b/celery/canvas.py @@ -550,7 +550,7 @@ def set(self, immutable=None, **options): def set_immutable(self, immutable): self.immutable = immutable - def _stamp_headers(self, visitor_headers=None, append_stamps=True, self_headers=True, **headers): + def _stamp_headers(self, visitor_headers=None, append_stamps=False, self_headers=True, **headers): """Collect all stamps from visitor, headers and self, and return an idempotent dictionary of stamps. @@ -569,7 +569,7 @@ def _stamp_headers(self, visitor_headers=None, append_stamps=True, self_headers= Returns: Dict: Merged stamps. """ - # Use append_stamps=True to prioritize visitor_headers over headers in case of duplicated stamps. + # Use append_stamps=False to prioritize visitor_headers over headers in case of duplicated stamps. # This will lose duplicated headers from the headers argument, but that is the best effort solution # to avoid implicitly casting the duplicated stamp into a list of both stamps from headers and # visitor_headers of the same key. @@ -610,7 +610,7 @@ def _stamp_headers(self, visitor_headers=None, append_stamps=True, self_headers= return headers - def stamp(self, visitor=None, append_stamps=True, **headers): + def stamp(self, visitor=None, append_stamps=False, **headers): """Stamp this signature with additional custom headers. Using a visitor will pass on responsibility for the stamping to the visitor. @@ -632,7 +632,7 @@ def stamp(self, visitor=None, append_stamps=True, **headers): headers = self._stamp_headers(visitor_headers, append_stamps, **headers) return self.set(**headers) - def stamp_links(self, visitor, append_stamps=True, **headers): + def stamp_links(self, visitor, append_stamps=False, **headers): """Stamp this signature links (callbacks and errbacks). Using a visitor will pass on responsibility for the stamping to the visitor. 
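Under the new default, stamping the same header twice keeps only the most recent value unless the caller opts back in to aggregation. A short sketch, again assuming ``add`` is any registered task:

.. code-block:: python

    sig = add.s(2, 2)
    sig.stamp(stamp='old')
    sig.stamp(stamp='new')  # append_stamps now defaults to False
    # The duplicate collapses to the latest value:
    # sig.options['stamp'] == 'new'

    sig.stamp(stamp='extra', append_stamps=True)  # opt back in to a list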
@@ -1094,7 +1094,7 @@ def freeze(self, _id=None, group_id=None, chord=None, ) return results[0] - def stamp(self, visitor=None, append_stamps=True, **headers): + def stamp(self, visitor=None, append_stamps=False, **headers): visitor_headers = None if visitor is not None: visitor_headers = visitor.on_chain_start(self, **headers) or {} @@ -1630,7 +1630,7 @@ def set_immutable(self, immutable): for task in self.tasks: task.set_immutable(immutable) - def stamp(self, visitor=None, append_stamps=True, **headers): + def stamp(self, visitor=None, append_stamps=False, **headers): visitor_headers = None if visitor is not None: visitor_headers = visitor.on_group_start(self, **headers) or {} @@ -2087,7 +2087,7 @@ def freeze(self, _id=None, group_id=None, chord=None, return body_result - def stamp(self, visitor=None, append_stamps=True, **headers): + def stamp(self, visitor=None, append_stamps=False, **headers): tasks = self.tasks if isinstance(tasks, group): tasks = tasks.tasks diff --git a/examples/stamping/examples.py b/examples/stamping/examples.py index f20ca38e86f..17cca8f6470 100644 --- a/examples/stamping/examples.py +++ b/examples/stamping/examples.py @@ -36,7 +36,7 @@ def run_example3(): sig1.link(sig1_link) sig1_link.stamp(FullVisitor()) sig1_link.stamp(MonitoringIdStampingVisitor()) - sig1.stamp(MonitoringIdStampingVisitor()) + sig1.stamp(MonitoringIdStampingVisitor(), append_stamps=True) sig1.delay() diff --git a/t/unit/tasks/test_stamping.py b/t/unit/tasks/test_stamping.py index a4cd953f3fb..51e0e3e92e6 100644 --- a/t/unit/tasks/test_stamping.py +++ b/t/unit/tasks/test_stamping.py @@ -678,7 +678,7 @@ def test_twice_stamping(self, subtests): sig_1 = self.add.s(2, 2) sig_1.stamp(stamp1="stamp1") sig_1.stamp(stamp2="stamp") - sig_1.stamp(stamp2="stamp2") + sig_1.stamp(stamp2="stamp2", append_stamps=True) sig_1.stamp(stamp3=["stamp3"]) sig_1_res = sig_1.freeze() sig_1.apply() @@ -702,7 +702,7 @@ def test_manual_stamping(self): sig_1 = self.add.s(2, 2) stamps = ["stamp1", "stamp2"] sig_1.stamp(visitor=None, groups=[stamps[1]]) - sig_1.stamp(visitor=None, groups=stamps[0]) + sig_1.stamp(visitor=None, groups=stamps[0], append_stamps=True) sig_1_res = sig_1.freeze() sig_1.apply() assert sorted(sig_1_res._get_task_meta()["groups"]) == sorted(stamps) From 83c32747b9d926c1f2a7f7f0b54a0e1153730dfa Mon Sep 17 00:00:00 2001 From: Yanick Champoux Date: Thu, 16 Feb 2023 16:24:20 -0500 Subject: [PATCH 0307/1051] typo in comment: mailicious => malicious --- extra/generic-init.d/celeryd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/extra/generic-init.d/celeryd b/extra/generic-init.d/celeryd index b2c05d56ba0..13fdddef774 100755 --- a/extra/generic-init.d/celeryd +++ b/extra/generic-init.d/celeryd @@ -77,7 +77,7 @@ _config_sanity() { echo echo "Resolution:" echo "Review the file carefully, and make sure it hasn't been " - echo "modified with mailicious intent. When sure the " + echo "modified with malicious intent. 
When sure the " echo "script is safe to execute with superuser privileges " echo "you can change ownership of the script:" echo " $ sudo chown root '$path'" From 9315e93b6a8a2bcd65227c7f9da35b869908ecc7 Mon Sep 17 00:00:00 2001 From: Shukant Pal Date: Thu, 16 Feb 2023 12:56:21 -0500 Subject: [PATCH 0308/1051] Fix command for starting flower with specified broker URL --- docs/userguide/monitoring.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/userguide/monitoring.rst b/docs/userguide/monitoring.rst index c65e8413aa6..b542633ec9d 100644 --- a/docs/userguide/monitoring.rst +++ b/docs/userguide/monitoring.rst @@ -299,9 +299,9 @@ Broker URL can also be passed through the .. code-block:: console - $ celery flower --broker=amqp://guest:guest@localhost:5672// + $ celery --broker=amqp://guest:guest@localhost:5672// flower or - $ celery flower --broker=redis://guest:guest@localhost:6379/0 + $ celery --broker=redis://guest:guest@localhost:6379/0 flower Then, you can visit flower in your web browser : From 2b4b500ca1212016824a5fa2996cfb752f0763a7 Mon Sep 17 00:00:00 2001 From: Norbert Cyran Date: Fri, 17 Feb 2023 18:34:41 +0100 Subject: [PATCH 0309/1051] Improve documentation on ETA/countdown tasks (#8069) --- docs/faq.rst | 5 +++-- .../backends-and-brokers/redis.rst | 10 ++++++---- docs/userguide/calling.rst | 18 ++++++++++++++++++ 3 files changed, 27 insertions(+), 6 deletions(-) diff --git a/docs/faq.rst b/docs/faq.rst index 29cd77900bd..01c92d425ce 100644 --- a/docs/faq.rst +++ b/docs/faq.rst @@ -878,9 +878,10 @@ Can I schedule tasks to execute at a specific time? --------------------------------------------------- **Answer**: Yes. You can use the `eta` argument of :meth:`Task.apply_async`. +Note that using distant `eta` times is not recommended, and in such cases +:ref:`periodic tasks <guide-beat>` should be preferred. -See also :ref:`guide-beat`. - +See :ref:`calling-eta` for more details. .. _faq-safe-worker-shutdown: diff --git a/docs/getting-started/backends-and-brokers/redis.rst b/docs/getting-started/backends-and-brokers/redis.rst index 1c583f0bb27..e7760762c8f 100644 --- a/docs/getting-started/backends-and-brokers/redis.rst +++ b/docs/getting-started/backends-and-brokers/redis.rst @@ -151,14 +151,16 @@ This causes problems with ETA/countdown/retry tasks where the time to execute exceeds the visibility timeout; in fact if that happens it will be executed again, and again in a loop. -So you have to increase the visibility timeout to match -the time of the longest ETA you're planning to use. - -Note that Celery will redeliver messages at worker shutdown, +To remedy this, you can increase the visibility timeout to match +the time of the longest ETA you're planning to use. However, this is not +recommended, as it may have a negative impact on reliability. +Celery will redeliver messages at worker shutdown, so having a long visibility timeout will only delay the redelivery of 'lost' tasks in the event of a power failure or forcefully terminated workers. +The broker is not a database, so if you need to schedule tasks for +the more distant future, a database-backed periodic task might be a better choice. Periodic tasks won't be affected by the visibility timeout, as this is a concept separate from ETA/countdown. 
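On the configuration side, the visibility timeout discussed above is set through the broker transport options. A sketch, assuming ``app`` is your Celery application and ``add`` is any task; the 12-hour value is only an example.

.. code-block:: python

    # Raise the Redis visibility timeout above the longest planned ETA
    # (value in seconds; 43200 == 12 hours).
    app.conf.broker_transport_options = {'visibility_timeout': 43200}

    # Short countdowns remain unaffected by these caveats.
    add.apply_async((2, 2), countdown=60)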
diff --git a/docs/userguide/calling.rst b/docs/userguide/calling.rst index 10fd1e4414d..6bffd47fbf6 100644 --- a/docs/userguide/calling.rst +++ b/docs/userguide/calling.rst @@ -255,6 +255,24 @@ and timezone information): >>> tomorrow = datetime.utcnow() + timedelta(days=1) >>> add.apply_async((2, 2), eta=tomorrow) +.. warning:: + + Tasks with `eta` or `countdown` are immediately fetched by the worker, + and until the scheduled time passes they reside in the worker's memory. + When using those options to schedule lots of tasks for the distant future, + those tasks may accumulate in the worker and have a significant impact on + RAM usage. + + Moreover, tasks are not acknowledged until the worker starts executing + them. If using Redis as a broker, tasks will get redelivered when the `countdown` + exceeds the `visibility_timeout` (see :ref:`redis-caveats`). + + Therefore, using `eta` and `countdown` **is not recommended** for + scheduling tasks for the distant future. Ideally, use values no longer + than several minutes. For longer durations, consider using + database-backed periodic tasks, e.g. with :pypi:`django-celery-beat` if + using Django (see :ref:`beat-custom-schedulers`). + .. warning:: When using RabbitMQ as a message broker when specifying a ``countdown`` From e81aba655b9cab64afbf102d1d5e7987ee872d69 Mon Sep 17 00:00:00 2001 From: Omer Katz Date: Sun, 19 Feb 2023 12:26:01 +0200 Subject: [PATCH 0310/1051] =?UTF-8?q?Bump=20version:=205.3.0b1=20=E2=86=92?= =?UTF-8?q?=205.3.0b2?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.cfg | 4 ++-- README.rst | 2 +- celery/__init__.py | 2 +- docs/includes/introduction.txt | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 02c8c493039..144713a9d3f 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,9 +1,9 @@ [bumpversion] -current_version = 5.3.0b1 +current_version = 5.3.0b2 commit = True tag = True parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?P<releaselevel>[a-z\d]+)? 
-serialize = 
{major}.{minor}.{patch}{releaselevel} {major}.{minor}.{patch} diff --git a/README.rst b/README.rst index 59b457b4086..5a5da257e19 100644 --- a/README.rst +++ b/README.rst @@ -2,7 +2,7 @@ |build-status| |coverage| |license| |wheel| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge| -:Version: 5.3.0b1 (dawn-chorus) +:Version: 5.3.0b2 (dawn-chorus) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ diff --git a/celery/__init__.py b/celery/__init__.py index 7c2de763898..16c16d85b1d 100644 --- a/celery/__init__.py +++ b/celery/__init__.py @@ -17,7 +17,7 @@ SERIES = 'dawn-chorus' -__version__ = '5.3.0b1' +__version__ = '5.3.0b2' __author__ = 'Ask Solem' __contact__ = 'auvipy@gmail.com' __homepage__ = 'https://docs.celeryq.dev/' diff --git a/docs/includes/introduction.txt b/docs/includes/introduction.txt index cc2017543d6..66d4ea1b592 100644 --- a/docs/includes/introduction.txt +++ b/docs/includes/introduction.txt @@ -1,4 +1,4 @@ -:Version: 5.3.0b1 (dawn-chorus) +:Version: 5.3.0b2 (dawn-chorus) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ From cb4c3256326e33f005c63d48520d0abb5e898151 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 20 Feb 2023 17:24:28 +0000 Subject: [PATCH 0311/1051] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/mirrors-mypy: v1.0.0 → v1.0.1](https://github.com/pre-commit/mirrors-mypy/compare/v1.0.0...v1.0.1) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 6fb81019a8a..d77e0c99509 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -29,7 +29,7 @@ repos: - id: isort - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.0.0 + rev: v1.0.1 hooks: - id: mypy pass_filenames: false From ade70b0324193e0906af18c7302462ecbafe096f Mon Sep 17 00:00:00 2001 From: cui fliter Date: Sat, 25 Feb 2023 20:41:14 +0800 Subject: [PATCH 0312/1051] fix function name Signed-off-by: cui fliter --- t/unit/app/test_schedules.py | 54 ++++++++++++++++++------------------ 1 file changed, 27 insertions(+), 27 deletions(-) diff --git a/t/unit/app/test_schedules.py b/t/unit/app/test_schedules.py index 4fc91113dfa..793e8b6f3a2 100644 --- a/t/unit/app/test_schedules.py +++ b/t/unit/app/test_schedules.py @@ -244,79 +244,79 @@ class test_crontab_remaining_estimate: def crontab(self, *args, **kwargs): return crontab(*args, **dict(kwargs, app=self.app)) - def next_ocurrance(self, crontab, now): + def next_occurrence(self, crontab, now): crontab.nowfun = lambda: now return now + crontab.remaining_estimate(now) def test_next_minute(self): - next = self.next_ocurrance( + next = self.next_occurrence( self.crontab(), datetime(2010, 9, 11, 14, 30, 15), ) assert next == datetime(2010, 9, 11, 14, 31) def test_not_next_minute(self): - next = self.next_ocurrance( + next = self.next_occurrence( self.crontab(), datetime(2010, 9, 11, 14, 59, 15), ) assert next == datetime(2010, 9, 11, 15, 0) def test_this_hour(self): - next = self.next_ocurrance( + next = self.next_occurrence( self.crontab(minute=[5, 42]), datetime(2010, 9, 11, 14, 30, 15), ) assert next == datetime(2010, 9, 11, 14, 42) def test_not_this_hour(self): - 
next = self.next_ocurrance( + next = self.next_occurrence( self.crontab(minute=[5, 10, 15]), datetime(2010, 9, 11, 14, 30, 15), ) assert next == datetime(2010, 9, 11, 15, 5) def test_today(self): - next = self.next_ocurrance( + next = self.next_occurrence( self.crontab(minute=[5, 42], hour=[12, 17]), datetime(2010, 9, 11, 14, 30, 15), ) assert next == datetime(2010, 9, 11, 17, 5) def test_not_today(self): - next = self.next_ocurrance( + next = self.next_occurrence( self.crontab(minute=[5, 42], hour=[12]), datetime(2010, 9, 11, 14, 30, 15), ) assert next == datetime(2010, 9, 12, 12, 5) def test_weekday(self): - next = self.next_ocurrance( + next = self.next_occurrence( self.crontab(minute=30, hour=14, day_of_week='sat'), datetime(2010, 9, 11, 14, 30, 15), ) assert next == datetime(2010, 9, 18, 14, 30) def test_not_weekday(self): - next = self.next_ocurrance( + next = self.next_occurrence( self.crontab(minute=[5, 42], day_of_week='mon-fri'), datetime(2010, 9, 11, 14, 30, 15), ) assert next == datetime(2010, 9, 13, 0, 5) def test_monthday(self): - next = self.next_ocurrance( + next = self.next_occurrence( self.crontab(minute=30, hour=14, day_of_month=18), datetime(2010, 9, 11, 14, 30, 15), ) assert next == datetime(2010, 9, 18, 14, 30) def test_not_monthday(self): - next = self.next_ocurrance( + next = self.next_occurrence( self.crontab(minute=[5, 42], day_of_month=29), datetime(2010, 1, 22, 14, 30, 15), ) assert next == datetime(2010, 1, 29, 0, 5) def test_weekday_monthday(self): - next = self.next_ocurrance( + next = self.next_occurrence( self.crontab(minute=30, hour=14, day_of_week='mon', day_of_month=18), datetime(2010, 1, 18, 14, 30, 15), @@ -324,42 +324,42 @@ def test_weekday_monthday(self): assert next == datetime(2010, 10, 18, 14, 30) def test_monthday_not_weekday(self): - next = self.next_ocurrance( + next = self.next_occurrence( self.crontab(minute=[5, 42], day_of_week='sat', day_of_month=29), datetime(2010, 1, 29, 0, 5, 15), ) assert next == datetime(2010, 5, 29, 0, 5) def test_weekday_not_monthday(self): - next = self.next_ocurrance( + next = self.next_occurrence( self.crontab(minute=[5, 42], day_of_week='mon', day_of_month=18), datetime(2010, 1, 11, 0, 5, 15), ) assert next == datetime(2010, 1, 18, 0, 5) def test_not_weekday_not_monthday(self): - next = self.next_ocurrance( + next = self.next_occurrence( self.crontab(minute=[5, 42], day_of_week='mon', day_of_month=18), datetime(2010, 1, 10, 0, 5, 15), ) assert next == datetime(2010, 1, 18, 0, 5) def test_leapday(self): - next = self.next_ocurrance( + next = self.next_occurrence( self.crontab(minute=30, hour=14, day_of_month=29), datetime(2012, 1, 29, 14, 30, 15), ) assert next == datetime(2012, 2, 29, 14, 30) def test_not_leapday(self): - next = self.next_ocurrance( + next = self.next_occurrence( self.crontab(minute=30, hour=14, day_of_month=29), datetime(2010, 1, 29, 14, 30, 15), ) assert next == datetime(2010, 3, 29, 14, 30) def test_weekmonthdayyear(self): - next = self.next_ocurrance( + next = self.next_occurrence( self.crontab(minute=30, hour=14, day_of_week='fri', day_of_month=29, month_of_year=1), datetime(2010, 1, 22, 14, 30, 15), @@ -367,7 +367,7 @@ def test_weekmonthdayyear(self): assert next == datetime(2010, 1, 29, 14, 30) def test_monthdayyear_not_week(self): - next = self.next_ocurrance( + next = self.next_occurrence( self.crontab(minute=[5, 42], day_of_week='wed,thu', day_of_month=29, month_of_year='1,4,7'), datetime(2010, 1, 29, 14, 30, 15), @@ -375,7 +375,7 @@ def test_monthdayyear_not_week(self): assert next == 
datetime(2010, 4, 29, 0, 5) def test_weekdaymonthyear_not_monthday(self): - next = self.next_ocurrance( + next = self.next_occurrence( self.crontab(minute=30, hour=14, day_of_week='fri', day_of_month=29, month_of_year='1-10'), datetime(2010, 1, 29, 14, 30, 15), @@ -383,7 +383,7 @@ def test_weekdaymonthyear_not_monthday(self): assert next == datetime(2010, 10, 29, 14, 30) def test_weekmonthday_not_monthyear(self): - next = self.next_ocurrance( + next = self.next_occurrence( self.crontab(minute=[5, 42], day_of_week='fri', day_of_month=29, month_of_year='2-10'), datetime(2010, 1, 29, 14, 30, 15), @@ -391,7 +391,7 @@ def test_weekmonthday_not_monthyear(self): assert next == datetime(2010, 10, 29, 0, 5) def test_weekday_not_monthdayyear(self): - next = self.next_ocurrance( + next = self.next_occurrence( self.crontab(minute=[5, 42], day_of_week='mon', day_of_month=18, month_of_year='2-10'), datetime(2010, 1, 11, 0, 5, 15), @@ -399,7 +399,7 @@ def test_weekday_not_monthdayyear(self): assert next == datetime(2010, 10, 18, 0, 5) def test_monthday_not_weekdaymonthyear(self): - next = self.next_ocurrance( + next = self.next_occurrence( self.crontab(minute=[5, 42], day_of_week='mon', day_of_month=29, month_of_year='2-4'), datetime(2010, 1, 29, 0, 5, 15), @@ -407,7 +407,7 @@ def test_monthday_not_weekdaymonthyear(self): assert next == datetime(2010, 3, 29, 0, 5) def test_monthyear_not_weekmonthday(self): - next = self.next_ocurrance( + next = self.next_occurrence( self.crontab(minute=[5, 42], day_of_week='mon', day_of_month=29, month_of_year='2-4'), datetime(2010, 2, 28, 0, 5, 15), @@ -415,7 +415,7 @@ def test_monthyear_not_weekmonthday(self): assert next == datetime(2010, 3, 29, 0, 5) def test_not_weekmonthdayyear(self): - next = self.next_ocurrance( + next = self.next_occurrence( self.crontab(minute=[5, 42], day_of_week='fri,sat', day_of_month=29, month_of_year='2-10'), datetime(2010, 1, 28, 14, 30, 15), @@ -426,13 +426,13 @@ def test_invalid_specification(self): # *** WARNING *** # This test triggers an infinite loop in case of a regression with pytest.raises(RuntimeError): - self.next_ocurrance( + self.next_occurrence( self.crontab(day_of_month=31, month_of_year=4), datetime(2010, 1, 28, 14, 30, 15), ) def test_leapyear(self): - next = self.next_ocurrance( + next = self.next_occurrence( self.crontab(minute=30, hour=14, day_of_month=29, month_of_year=2), datetime(2012, 2, 29, 14, 30), ) From d77844d7264b72b13f0cda7c4bbba6598f9e7223 Mon Sep 17 00:00:00 2001 From: Mo Balaa Date: Fri, 24 Feb 2023 23:26:06 -0600 Subject: [PATCH 0313/1051] Update configuration.rst --- docs/userguide/configuration.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst index a43681b10aa..eb83c05b7ea 100644 --- a/docs/userguide/configuration.rst +++ b/docs/userguide/configuration.rst @@ -129,7 +129,7 @@ have been moved into a new ``task_`` prefix. 
``CELERY_SECURITY_KEY_PASSWORD`` :setting:`security_key_password` ``CELERY_ACKS_LATE`` :setting:`task_acks_late` ``CELERY_ACKS_ON_FAILURE_OR_TIMEOUT`` :setting:`task_acks_on_failure_or_timeout` -``CELERY_ALWAYS_EAGER`` :setting:`task_always_eager` +``CELERY_TASK_ALWAYS_EAGER`` :setting:`task_always_eager` ``CELERY_ANNOTATIONS`` :setting:`task_annotations` ``CELERY_COMPRESSION`` :setting:`task_compression` ``CELERY_CREATE_MISSING_QUEUES`` :setting:`task_create_missing_queues` From f4ad2e1289dc003a9906beff5aea8fc33f1abcef Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 1 Mar 2023 10:21:12 +0200 Subject: [PATCH 0314/1051] Stamping documentation fixes & cleanups (#8092) --- docs/userguide/canvas.rst | 49 ++++++--------------------------------- 1 file changed, 7 insertions(+), 42 deletions(-) diff --git a/docs/userguide/canvas.rst b/docs/userguide/canvas.rst index c147777cb98..32042054758 100644 --- a/docs/userguide/canvas.rst +++ b/docs/userguide/canvas.rst @@ -1170,29 +1170,11 @@ will initialize a group ``g`` and mark its components with stamp ``your_custom_s For this feature to be useful, you need to set the :setting:`result_extended` configuration option to ``True`` or directive ``result_extended = True``. - -Group stamping --------------- - -When the ``apply`` and ``apply_async`` methods are called, -there is an automatic stamping signature with group id. -Stamps are stored in group header. -For example, after - -.. code-block:: pycon - - >>> g.apply_async() - -the header of task sig1 will store the stamp groups with g.id. -In the case of nested groups, the order of the stamps corresponds -to the nesting level. The group stamping is idempotent; -the task cannot be stamped twice with the same group id. - Canvas stamping ---------------- -In addition to the default group stamping, we can also stamp -canvas with custom stamps, as shown in the example. +We can also stamp the canvas with custom stamping logic, using the visitor class ``StampingVisitor`` +as the base class for the custom stamping visitor. Custom stamping ---------------- @@ -1200,7 +1182,7 @@ Custom stamping If more complex stamping logic is required, it is possible to implement custom stamping behavior based on the Visitor pattern. The class that implements this custom logic must -inherit ``VisitorStamping`` and implement appropriate methods. +inherit ``StampingVisitor`` and implement appropriate methods. For example, the following example ``InGroupVisitor`` will label tasks that are in side of some group by label ``in_group``. @@ -1238,9 +1220,10 @@ the external monitoring system, etc. .. note:: - The ``stamped_headers`` key returned in ``on_signature`` is used to specify the headers that will be - stamped on the task. If this key is not specified, the stamping visitor will assume all keys in the - returned dictionary are the stamped headers from the visitor. + The ``stamped_headers`` key returned in ``on_signature`` (or any other visitor method) is used to + specify the headers that will be stamped on the task. If this key is not specified, the stamping + visitor will assume all keys in the returned dictionary are the stamped headers from the visitor. + This means the following code block will result in the same behavior as the previous example. .. 
code-block:: python @@ -1320,21 +1303,3 @@ This example will result in the following stamps: {'header': 'value', 'on_callback': True, 'stamped_headers': ['header', 'on_callback']} >>> c.body.options['link_error'][0].options {'header': 'value', 'on_errback': True, 'stamped_headers': ['header', 'on_errback']} - -When calling ``apply_async()`` on ``c``, the group stamping will be applied on top of the above stamps. -This will result in the following stamps: - -.. code-block:: python - - >>> c.options - {'header': 'value', 'groups': ['1234'], 'stamped_headers': ['header', 'groups']} - >>> c.tasks.tasks[0].options - {'header': 'value', 'groups': ['1234'], 'stamped_headers': ['header', 'groups']} - >>> c.tasks.tasks[1].options - {'header': 'value', 'groups': ['1234'], 'stamped_headers': ['header', 'groups']} - >>> c.body.options - {'header': 'value', 'groups': [], 'stamped_headers': ['header', 'groups']} - >>> c.body.options['link'][0].options - {'header': 'value', 'on_callback': True, 'groups': [], 'stamped_headers': ['header', 'on_callback', 'groups']} - >>> c.body.options['link_error'][0].options - {'header': 'value', 'on_errback': True, 'groups': [], 'stamped_headers': ['header', 'on_errback', 'groups']} From 005714987c541f710b763f61fc1e957f59e071a2 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Thu, 2 Mar 2023 16:27:43 +0600 Subject: [PATCH 0315/1051] switch to maintained pyro5 --- requirements/extras/pyro.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/pyro.txt b/requirements/extras/pyro.txt index bde9e2995b9..bb73cdd74f2 100644 --- a/requirements/extras/pyro.txt +++ b/requirements/extras/pyro.txt @@ -1 +1 @@ -pyro4==4.82 +pyro5 From daa788a6677860523c066fce788ae25f50340827 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Thu, 2 Mar 2023 20:34:45 +0600 Subject: [PATCH 0316/1051] update dependencies of tests (#8095) * update dependencies of tests * Update requirements/test.txt --- requirements/test.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/test.txt b/requirements/test.txt index 1010de42936..a03c774750b 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,13 +1,13 @@ -pytest==7.2.0 +pytest==7.2.1 pytest-celery==0.0.0 -pytest-subtests==0.9.0 +pytest-subtests==0.10.0 pytest-timeout~=2.1.0 pytest-click==1.1.0 pytest-order==1.0.1 boto3>=1.9.178 moto>=2.2.6 # typing extensions -mypy==0.991; platform_python_implementation=="CPython" +mypy==1.0.1; platform_python_implementation=="CPython" pre-commit==2.21.0 -r extras/yaml.txt -r extras/msgpack.txt From 3bff3f06740a0d509f807e14702f7144b043ae54 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Thu, 2 Mar 2023 20:35:59 +0600 Subject: [PATCH 0317/1051] cryptography==39.0.1 (#8096) --- requirements/extras/auth.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/auth.txt b/requirements/extras/auth.txt index e5c1e4d0870..f50d2fca306 100644 --- a/requirements/extras/auth.txt +++ b/requirements/extras/auth.txt @@ -1 +1 @@ -cryptography==39.0.0 +cryptography==39.0.1 From 311fa62a808d9c509d25aaf8c1530fbb5304d818 Mon Sep 17 00:00:00 2001 From: Marcelo Trylesinski Date: Fri, 3 Mar 2023 01:25:19 +0100 Subject: [PATCH 0318/1051] Annotate `celery/security/certificate.py` (#7398) * Annotate `celery/security/certificate.py` * Update celery/security/certificate.py Co-authored-by: Omer Katz * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * [pre-commit.ci] auto
fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * Add missing return on __init__ * Fix pre-commit issues --------- Co-authored-by: Asif Saif Uddin Co-authored-by: Omer Katz Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- celery/security/certificate.py | 45 ++++++++++++++++++++++------------ pyproject.toml | 3 ++- 2 files changed, 31 insertions(+), 17 deletions(-) diff --git a/celery/security/certificate.py b/celery/security/certificate.py index ebc8cd630d7..80398b39f6d 100644 --- a/celery/security/certificate.py +++ b/celery/security/certificate.py @@ -1,7 +1,10 @@ """X.509 certificates.""" +from __future__ import annotations + import datetime import glob import os +from typing import TYPE_CHECKING, Iterator from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives.asymmetric import padding, rsa @@ -12,13 +15,23 @@ from .utils import reraise_errors +if TYPE_CHECKING: + from cryptography.hazmat.primitives.asymmetric.dsa import DSAPublicKey + from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePublicKey + from cryptography.hazmat.primitives.asymmetric.ed448 import Ed448PublicKey + from cryptography.hazmat.primitives.asymmetric.ed25519 import Ed25519PublicKey + from cryptography.hazmat.primitives.asymmetric.rsa import RSAPublicKey + from cryptography.hazmat.primitives.asymmetric.utils import Prehashed + from cryptography.hazmat.primitives.hashes import HashAlgorithm + + __all__ = ('Certificate', 'CertStore', 'FSCertStore') class Certificate: """X.509 certificate.""" - def __init__(self, cert): + def __init__(self, cert: str) -> None: with reraise_errors( 'Invalid certificate: {0!r}', errors=(ValueError,) ): @@ -28,27 +41,28 @@ def __init__(self, cert): if not isinstance(self._cert.public_key(), rsa.RSAPublicKey): raise ValueError("Non-RSA certificates are not supported.") - def has_expired(self): + def has_expired(self) -> bool: """Check if the certificate has expired.""" return datetime.datetime.utcnow() >= self._cert.not_valid_after - def get_pubkey(self) -> rsa.RSAPublicKey: - """Get public key from certificate. 
Public key type is checked in __init__.""" + def get_pubkey(self) -> ( + DSAPublicKey | EllipticCurvePublicKey | Ed448PublicKey | Ed25519PublicKey | RSAPublicKey + ): return self._cert.public_key() - def get_serial_number(self): + def get_serial_number(self) -> int: """Return the serial number in the certificate.""" return self._cert.serial_number - def get_issuer(self): + def get_issuer(self) -> str: """Return issuer (CA) as a string.""" return ' '.join(x.value for x in self._cert.issuer) - def get_id(self): + def get_id(self) -> str: """Serial number/issuer pair uniquely identifies a certificate.""" return f'{self.get_issuer()} {self.get_serial_number()}' - def verify(self, data, signature, digest): + def verify(self, data: bytes, signature: bytes, digest: HashAlgorithm | Prehashed) -> None: """Verify signature for string containing data.""" with reraise_errors('Bad signature: {0!r}'): @@ -56,28 +70,27 @@ def verify(self, data, signature, digest): mgf=padding.MGF1(digest), salt_length=padding.PSS.MAX_LENGTH) - self.get_pubkey().verify(signature, - ensure_bytes(data), pad, digest) + self.get_pubkey().verify(signature, ensure_bytes(data), pad, digest) class CertStore: """Base class for certificate stores.""" - def __init__(self): - self._certs = {} + def __init__(self) -> None: + self._certs: dict[str, Certificate] = {} - def itercerts(self): + def itercerts(self) -> Iterator[Certificate]: """Return certificate iterator.""" yield from self._certs.values() - def __getitem__(self, id): + def __getitem__(self, id: str) -> Certificate: """Get certificate by id.""" try: return self._certs[bytes_to_str(id)] except KeyError: raise SecurityError(f'Unknown certificate: {id!r}') - def add_cert(self, cert): + def add_cert(self, cert: Certificate) -> None: cert_id = bytes_to_str(cert.get_id()) if cert_id in self._certs: raise SecurityError(f'Duplicate certificate: {id!r}') @@ -87,7 +100,7 @@ def add_cert(self, cert): class FSCertStore(CertStore): """File system certificate store.""" - def __init__(self, path): + def __init__(self, path: str) -> None: super().__init__() if os.path.isdir(path): path = os.path.join(path, '*') diff --git a/pyproject.toml b/pyproject.toml index 393f1d49656..2c267e8b617 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -17,7 +17,8 @@ files = [ "celery/states.py", "celery/signals.py", "celery/fixups", - "celery/concurrency/thread.py" + "celery/concurrency/thread.py", + "celery/security/certificate.py", ] [tool.coverage.run] From 0242b82727b633a552ca02e710bb1182cf67a16f Mon Sep 17 00:00:00 2001 From: Trenton H <797416+stumpylog@users.noreply.github.com> Date: Thu, 2 Mar 2023 08:50:52 -0800 Subject: [PATCH 0319/1051] Replaces parse_iso8601 with fromisoformat and marks as deprecated --- celery/result.py | 3 +-- celery/utils/iso8601.py | 3 +++ celery/utils/time.py | 3 +-- t/unit/tasks/test_tasks.py | 5 ++--- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/celery/result.py b/celery/result.py index eb3e154933b..f66bade1d40 100644 --- a/celery/result.py +++ b/celery/result.py @@ -14,7 +14,6 @@ from .app import app_or_default from .exceptions import ImproperlyConfigured, IncompleteStream, TimeoutError from .utils.graph import DependencyGraph, GraphFormatter -from .utils.iso8601 import parse_iso8601 try: import tblib @@ -530,7 +529,7 @@ def date_done(self): """UTC date and time.""" date_done = self._get_task_meta().get('date_done') if date_done and not isinstance(date_done, datetime.datetime): - return parse_iso8601(date_done) + return 
datetime.datetime.fromisoformat(date_done) return date_done @property diff --git a/celery/utils/iso8601.py b/celery/utils/iso8601.py index 4f9d183312b..2a5ae69619f 100644 --- a/celery/utils/iso8601.py +++ b/celery/utils/iso8601.py @@ -37,6 +37,8 @@ from pytz import FixedOffset +from celery.utils.deprecated import warn + __all__ = ('parse_iso8601',) # Adapted from http://delete.me.uk/2005/03/iso8601.html @@ -53,6 +55,7 @@ def parse_iso8601(datestring): """Parse and convert ISO-8601 string to datetime.""" + warn("parse_iso8601", "v5.3", "v6", "datetime.datetime.fromisoformat") m = ISO8601_REGEX.match(datestring) if not m: raise ValueError('unable to parse date string %r' % datestring) diff --git a/celery/utils/time.py b/celery/utils/time.py index ed4008c6e48..984da17c80f 100644 --- a/celery/utils/time.py +++ b/celery/utils/time.py @@ -13,7 +13,6 @@ from pytz import utc from .functional import dictfilter -from .iso8601 import parse_iso8601 from .text import pluralize __all__ = ( @@ -257,7 +256,7 @@ def maybe_iso8601(dt): return if isinstance(dt, datetime): return dt - return parse_iso8601(dt) + return datetime.fromisoformat(dt) def is_naive(dt): diff --git a/t/unit/tasks/test_tasks.py b/t/unit/tasks/test_tasks.py index a636eac73be..0095bac3405 100644 --- a/t/unit/tasks/test_tasks.py +++ b/t/unit/tasks/test_tasks.py @@ -13,7 +13,6 @@ from celery.contrib.testing.mocks import ContextMock from celery.exceptions import Ignore, ImproperlyConfigured, Retry from celery.result import AsyncResult, EagerResult -from celery.utils.time import parse_iso8601 try: from urllib.error import HTTPError @@ -889,11 +888,11 @@ def assert_next_task_data_equal(self, consumer, presult, task_name, assert task_headers['task'] == task_name if test_eta: assert isinstance(task_headers.get('eta'), str) - to_datetime = parse_iso8601(task_headers.get('eta')) + to_datetime = datetime.fromisoformat(task_headers.get('eta')) assert isinstance(to_datetime, datetime) if test_expires: assert isinstance(task_headers.get('expires'), str) - to_datetime = parse_iso8601(task_headers.get('expires')) + to_datetime = datetime.fromisoformat(task_headers.get('expires')) assert isinstance(to_datetime, datetime) properties = properties or {} for arg_name, arg_value in properties.items(): From 78292471fe014e5d819b14c819326b320d98746a Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Sat, 4 Mar 2023 15:50:55 +0600 Subject: [PATCH 0320/1051] pytest==7.2.2 (#8106) --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index a03c774750b..a766f6c7c17 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,4 +1,4 @@ -pytest==7.2.1 +pytest==7.2.2 pytest-celery==0.0.0 pytest-subtests==0.10.0 pytest-timeout~=2.1.0 From 32a83e2037c6ff431f74b522b4a41914e01f150e Mon Sep 17 00:00:00 2001 From: Maxwell Muoto Date: Sun, 5 Mar 2023 02:47:08 -0600 Subject: [PATCH 0321/1051] Type annotations for `celery/utils/text.py` (#8107) * Annotate text * fix * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * fix * fix * Add to pyproject * Fix * remove comment * Small fix * remove comment * remove unused arg * Fix * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * fix * fix * build fix * type fix * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * pytest==7.2.2 (#8106) * Fix * Fix * Type checking fix * Update 
celery/utils/text.py * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * Fix * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Asif Saif Uddin --- celery/utils/text.py | 74 ++++++++++++++++++++------------------------ pyproject.toml | 1 + 2 files changed, 34 insertions(+), 41 deletions(-) diff --git a/celery/utils/text.py b/celery/utils/text.py index f7b7571d57b..3dc7ade973f 100644 --- a/celery/utils/text.py +++ b/celery/utils/text.py @@ -1,11 +1,13 @@ """Text formatting utilities.""" +from __future__ import annotations + import io import re -from collections.abc import Callable from functools import partial from pprint import pformat +from re import Match from textwrap import fill -from typing import Any, List, Mapping, Pattern # noqa +from typing import Any, Callable, Pattern __all__ = ( 'abbr', 'abbrtask', 'dedent', 'dedent_initial', @@ -23,40 +25,34 @@ RE_FORMAT = re.compile(r'%(\w)') -def str_to_list(s): - # type: (str) -> List[str] +def str_to_list(s: str) -> list[str]: """Convert string to list.""" if isinstance(s, str): return s.split(',') return s -def dedent_initial(s, n=4): - # type: (str, int) -> str +def dedent_initial(s: str, n: int = 4) -> str: """Remove indentation from first line of text.""" return s[n:] if s[:n] == ' ' * n else s -def dedent(s, n=4, sep='\n'): - # type: (str, int, str) -> str +def dedent(s: str, sep: str = '\n') -> str: """Remove indentation.""" return sep.join(dedent_initial(l) for l in s.splitlines()) -def fill_paragraphs(s, width, sep='\n'): - # type: (str, int, str) -> str +def fill_paragraphs(s: str, width: int, sep: str = '\n') -> str: """Fill paragraphs with newlines (or custom separator).""" return sep.join(fill(p, width) for p in s.split(sep)) -def join(l, sep='\n'): - # type: (str, str) -> str +def join(l: list[str], sep: str = '\n') -> str: """Concatenate list of strings.""" return sep.join(v for v in l if v) -def ensure_sep(sep, s, n=2): - # type: (str, str, int) -> str +def ensure_sep(sep: str, s: str, n: int = 2) -> str: """Ensure text s ends in separator sep'.""" return s + sep * (n - s.count(sep)) @@ -64,18 +60,17 @@ def ensure_sep(sep, s, n=2): ensure_newlines = partial(ensure_sep, '\n') -def abbr(S, max, ellipsis='...'): - # type: (str, int, str) -> str +def abbr(S: str, max: int, ellipsis: str | bool = '...') -> str: """Abbreviate word.""" if S is None: return '???' if len(S) > max: - return ellipsis and (S[:max - len(ellipsis)] + ellipsis) or S[:max] + return isinstance(ellipsis, str) and ( + S[: max - len(ellipsis)] + ellipsis) or S[: max] return S -def abbrtask(S, max): - # type: (str, int) -> str +def abbrtask(S: str, max: int) -> str: """Abbreviate task name.""" if S is None: return '???' 
@@ -86,33 +81,30 @@ def abbrtask(S, max): return S -def indent(t, indent=0, sep='\n'): - # type: (str, int, str) -> str +def indent(t: str, indent: int = 0, sep: str = '\n') -> str: """Indent text.""" return sep.join(' ' * indent + p for p in t.split(sep)) -def truncate(s, maxlen=128, suffix='...'): - # type: (str, int, str) -> str +def truncate(s: str, maxlen: int = 128, suffix: str = '...') -> str: """Truncate text to a maximum number of characters.""" if maxlen and len(s) >= maxlen: return s[:maxlen].rsplit(' ', 1)[0] + suffix return s -def pluralize(n, text, suffix='s'): - # type: (int, str, str) -> str +def pluralize(n: int, text: str, suffix: str = 's') -> str: """Pluralize term when n is greater than one.""" if n != 1: return text + suffix return text -def pretty(value, width=80, nl_width=80, sep='\n', **kw): - # type: (str, int, int, str, **Any) -> str +def pretty(value: str, width: int = 80, nl_width: int = 80, sep: str = '\n', ** + kw: Any) -> str: """Format value for printing to console.""" if isinstance(value, dict): - return f'{{{sep} {pformat(value, 4, nl_width)[1:]}' + return f'{sep} {pformat(value, 4, nl_width)[1:]}' elif isinstance(value, tuple): return '{}{}{}'.format( sep, ' ' * 4, pformat(value, width=nl_width, **kw), @@ -121,24 +113,24 @@ def pretty(value, width=80, nl_width=80, sep='\n', **kw): return pformat(value, width=width, **kw) -def match_case(s, other): - # type: (str, str) -> str +def match_case(s: str, other: str) -> str: return s.upper() if other.isupper() else s.lower() -def simple_format(s, keys, pattern=RE_FORMAT, expand=r'\1'): - # type: (str, Mapping[str, str], Pattern, str) -> str +def simple_format( + s: str, keys: dict[str, str | Callable], + pattern: Pattern[str] = RE_FORMAT, expand: str = r'\1') -> str: """Format string, expanding abbreviations in keys'.""" if s: keys.setdefault('%', '%') - def resolve(match): + def resolve(match: Match) -> str | Any: key = match.expand(expand) try: resolver = keys[key] except KeyError: raise ValueError(UNKNOWN_SIMPLE_FORMAT_KEY.format(key, s)) - if isinstance(resolver, Callable): + if callable(resolver): return resolver() return resolver @@ -146,8 +138,7 @@ def resolve(match): return s -def remove_repeating_from_task(task_name, s): - # type: (str, str) -> str +def remove_repeating_from_task(task_name: str, s: str) -> str: """Given task name, remove repeating module names. Example: @@ -162,8 +153,7 @@ def remove_repeating_from_task(task_name, s): return remove_repeating(module, s) -def remove_repeating(substr, s): - # type: (str, str) -> str +def remove_repeating(substr: str, s: str) -> str: """Remove repeating module names from string. 
Arguments: @@ -199,8 +189,10 @@ def remove_repeating(substr, s): class WhateverIO(StringIO): """StringIO that takes bytes or str.""" - def __init__(self, v=None, *a, **kw): + def __init__( + self, v: bytes | str | None = None, *a: Any, **kw: Any) -> None: _SIO_init(self, v.decode() if isinstance(v, bytes) else v, *a, **kw) - def write(self, data): - _SIO_write(self, data.decode() if isinstance(data, bytes) else data) + def write(self, data: bytes | str) -> int: + return _SIO_write(self, data.decode() + if isinstance(data, bytes) else data) diff --git a/pyproject.toml b/pyproject.toml index 2c267e8b617..bbdb945a1a5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -19,6 +19,7 @@ files = [ "celery/fixups", "celery/concurrency/thread.py", "celery/security/certificate.py", + "celery/utils/text.py", ] [tool.coverage.run] From 159e63ca63ef605d793a10dc4ae897c9ad48206d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Blondon?= Date: Mon, 6 Mar 2023 16:38:53 +0100 Subject: [PATCH 0322/1051] Update web framework URLs --- README.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.rst b/README.rst index 5a5da257e19..8dfa7940ceb 100644 --- a/README.rst +++ b/README.rst @@ -206,14 +206,14 @@ database connections at ``fork``. .. _`Django`: https://djangoproject.com/ .. _`Pylons`: http://pylonsproject.org/ -.. _`Flask`: http://flask.pocoo.org/ +.. _`Flask`: https://flask.palletsprojects.com/ .. _`web2py`: http://web2py.com/ .. _`Bottle`: https://bottlepy.org/ -.. _`Pyramid`: http://docs.pylonsproject.org/en/latest/docs/pyramid.html +.. _`Pyramid`: https://docs.pylonsproject.org/projects/pyramid/en/latest/ .. _`pyramid_celery`: https://pypi.org/project/pyramid_celery/ .. _`celery-pylons`: https://pypi.org/project/celery-pylons/ .. _`web2py-celery`: https://code.google.com/p/web2py-celery/ -.. _`Tornado`: http://www.tornadoweb.org/ +.. _`Tornado`: https://www.tornadoweb.org/ .. _`tornado-celery`: https://github.com/mher/tornado-celery/ .. _celery-documentation: From 5d3ec7c7c8420f6c07d7e280b486647a373ba208 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Blondon?= Date: Mon, 6 Mar 2023 16:12:43 +0100 Subject: [PATCH 0323/1051] Fix contribution URL --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 8dfa7940ceb..dd34974d16e 100644 --- a/README.rst +++ b/README.rst @@ -461,7 +461,7 @@ Be sure to also read the `Contributing to Celery`_ section in the documentation. .. _`Contributing to Celery`: - https://docs.celeryq.dev/en/main/contributing.html + https://docs.celeryq.dev/en/stable/contributing.html |oc-contributors| From 5ae9696a244452d1af48c340fc888eb63d130db7 Mon Sep 17 00:00:00 2001 From: Pamela Fox Date: Tue, 7 Mar 2023 16:25:52 -0800 Subject: [PATCH 0324/1051] Trying to clarify CERT_REQUIRED --- docs/userguide/configuration.rst | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst index eb83c05b7ea..1831157d612 100644 --- a/docs/userguide/configuration.rst +++ b/docs/userguide/configuration.rst @@ -1230,8 +1230,9 @@ Use the ``rediss://`` protocol to connect to redis over TLS:: result_backend = 'rediss://username:password@host:port/db?ssl_cert_reqs=required' Note that the ``ssl_cert_reqs`` string should be one of ``required``, -``optional``, or ``none`` (though, for backwards compatibility, the string -may also be one of ``CERT_REQUIRED``, ``CERT_OPTIONAL``, ``CERT_NONE``). 
+``optional``, or ``none`` (though, for backwards compatibility with older Celery versions, the string +may also be one of ``CERT_REQUIRED``, ``CERT_OPTIONAL``, ``CERT_NONE``, but those values +only work for Celery, not for Redis directly). If a Unix socket connection should be used, the URL needs to be in the format::: From b22a34f96ddbf1fc2a6995832505be30c2ba89de Mon Sep 17 00:00:00 2001 From: Raphael Cohen Date: Wed, 8 Mar 2023 10:50:35 +0100 Subject: [PATCH 0325/1051] Fix potential AttributeError on 'stamps' --- celery/backends/base.py | 2 +- t/unit/backends/test_base.py | 12 ++++++++++++ 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/celery/backends/base.py b/celery/backends/base.py index 8a391cf0baa..4216c3b343e 100644 --- a/celery/backends/base.py +++ b/celery/backends/base.py @@ -491,7 +491,7 @@ def _get_result_meta(self, result, if hasattr(request, 'delivery_info') and request.delivery_info else None, } - if getattr(request, 'stamps'): + if getattr(request, 'stamps', None): request_meta['stamped_headers'] = request.stamped_headers request_meta.update(request.stamps) diff --git a/t/unit/backends/test_base.py b/t/unit/backends/test_base.py index 981305c72f4..1a355d3c3ef 100644 --- a/t/unit/backends/test_base.py +++ b/t/unit/backends/test_base.py @@ -125,6 +125,18 @@ def test_get_result_meta(self): assert meta['kwargs'] == kwargs assert meta['queue'] == 'celery' + def test_get_result_meta_stamps_attribute_error(self): + class Request: + pass + self.app.conf.result_extended = True + b1 = BaseBackend(self.app) + meta = b1._get_result_meta(result={'fizz': 'buzz'}, + state=states.SUCCESS, traceback=None, + request=Request()) + assert meta['status'] == states.SUCCESS + assert meta['result'] == {'fizz': 'buzz'} + assert meta['traceback'] is None + def test_get_result_meta_encoded(self): self.app.conf.result_extended = True b1 = BaseBackend(self.app) From 93bccdce88de24713aa935ec590f067925fd8179 Mon Sep 17 00:00:00 2001 From: Maxwell Muoto Date: Wed, 8 Mar 2023 18:35:42 -0600 Subject: [PATCH 0326/1051] Type annotations for `celery/apps/beat.py` (#8108) * Type annotations * small change * unused import * Type annotations for `celery/utils/text.py` (#8107) * Annotate text * fix * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * fix * fix * Add to pyproject * Fix * remove comment * Small fix * remove comment * remove unused arg * Fix * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * fix * fix * build fix * type fix * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * pytest==7.2.2 (#8106) * Fix * Fix * Type checking fix * Update celery/utils/text.py * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * Fix * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Asif Saif Uddin * Pre-commit fix * value error change * Fix * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * Unit test * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * Comment fix * Fix * Future import * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * revert * format fix * [pre-commit.ci]
auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * Update celery/apps/beat.py --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Asif Saif Uddin --- celery/apps/beat.py | 46 ++++++++++++++++++++++------------------- celery/schedules.py | 5 +++-- pyproject.toml | 1 + t/unit/app/test_beat.py | 7 +++++-- 4 files changed, 34 insertions(+), 25 deletions(-) diff --git a/celery/apps/beat.py b/celery/apps/beat.py index dbed1ed442f..7258ac8555b 100644 --- a/celery/apps/beat.py +++ b/celery/apps/beat.py @@ -6,12 +6,17 @@ as an actual application, like installing signal handlers and so on. """ +from __future__ import annotations + import numbers import socket import sys from datetime import datetime +from signal import Signals +from types import FrameType +from typing import Any -from celery import VERSION_BANNER, beat, platforms +from celery import VERSION_BANNER, Celery, beat, platforms from celery.utils.imports import qualname from celery.utils.log import LOG_LEVELS, get_logger from celery.utils.time import humanize_seconds @@ -36,16 +41,16 @@ class Beat: """Beat as a service.""" Service = beat.Service - app = None - - def __init__(self, max_interval=None, app=None, - socket_timeout=30, pidfile=None, no_color=None, - loglevel='WARN', logfile=None, schedule=None, - scheduler=None, - scheduler_cls=None, # XXX use scheduler - redirect_stdouts=None, - redirect_stdouts_level=None, - quiet=False, **kwargs): + app: Celery = None + + def __init__(self, max_interval: int | None = None, app: Celery | None = None, + socket_timeout: int = 30, pidfile: str | None = None, no_color: bool | None = None, + loglevel: str = 'WARN', logfile: str | None = None, schedule: str | None = None, + scheduler: str | None = None, + scheduler_cls: str | None = None, # XXX use scheduler + redirect_stdouts: bool | None = None, + redirect_stdouts_level: str | None = None, + quiet: bool = False, **kwargs: Any) -> None: self.app = app = app or self.app either = self.app.either self.loglevel = loglevel @@ -67,11 +72,10 @@ def __init__(self, max_interval=None, app=None, enabled=not no_color if no_color is not None else no_color, ) self.pidfile = pidfile - if not isinstance(self.loglevel, numbers.Integral): self.loglevel = LOG_LEVELS[self.loglevel.upper()] - def run(self): + def run(self) -> None: if not self.quiet: print(str(self.colored.cyan( f'celery beat v{VERSION_BANNER} is starting.'))) @@ -79,14 +83,14 @@ def run(self): self.set_process_title() self.start_scheduler() - def setup_logging(self, colorize=None): + def setup_logging(self, colorize: bool | None = None) -> None: if colorize is None and self.no_color is not None: colorize = not self.no_color self.app.log.setup(self.loglevel, self.logfile, self.redirect_stdouts, self.redirect_stdouts_level, colorize=colorize) - def start_scheduler(self): + def start_scheduler(self) -> None: if self.pidfile: platforms.create_pidlock(self.pidfile) service = self.Service( @@ -113,7 +117,7 @@ def start_scheduler(self): exc_info=True) raise - def banner(self, service): + def banner(self, service: beat.Service) -> str: c = self.colored return str( c.blue('__ ', c.magenta('-'), @@ -122,13 +126,13 @@ def banner(self, service): c.reset(self.startup_info(service))), ) - def init_loader(self): + def init_loader(self) -> None: # Run the worker init handler. # (Usually imports task modules and such.) 
self.app.loader.init_worker() self.app.finalize() - def startup_info(self, service): + def startup_info(self, service: beat.Service) -> str: scheduler = service.get_scheduler(lazy=True) return STARTUP_INFO_FMT.format( conninfo=self.app.connection().as_uri(), @@ -142,15 +146,15 @@ def startup_info(self, service): max_interval=scheduler.max_interval, ) - def set_process_title(self): + def set_process_title(self) -> None: arg_start = 'manage' in sys.argv[0] and 2 or 1 platforms.set_process_title( 'celery beat', info=' '.join(sys.argv[arg_start:]), ) - def install_sync_handler(self, service): + def install_sync_handler(self, service: beat.Service) -> None: """Install a `SIGTERM` + `SIGINT` handler saving the schedule.""" - def _sync(signum, frame): + def _sync(signum: Signals, frame: FrameType) -> None: service.sync() raise SystemExit() platforms.signals.update(SIGTERM=_sync, SIGINT=_sync) diff --git a/celery/schedules.py b/celery/schedules.py index 89fb5a3c890..0f6389c3967 100644 --- a/celery/schedules.py +++ b/celery/schedules.py @@ -468,8 +468,9 @@ def day_out_of_range(year, month, day): return False def is_before_last_run(year, month, day): - return self.maybe_make_aware(datetime(year, month, day, next_hour, next_minute), - naive_as_utc=False) < last_run_at + return self.maybe_make_aware( + datetime(year, month, day, next_hour, next_minute), + naive_as_utc=False) < last_run_at def roll_over(): for _ in range(2000): diff --git a/pyproject.toml b/pyproject.toml index bbdb945a1a5..722fd58a68d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -20,6 +20,7 @@ files = [ "celery/concurrency/thread.py", "celery/security/certificate.py", "celery/utils/text.py", + "celery/apps/beat.py", ] [tool.coverage.run] diff --git a/t/unit/app/test_beat.py b/t/unit/app/test_beat.py index dd24ecc9708..b7ff69e4a97 100644 --- a/t/unit/app/test_beat.py +++ b/t/unit/app/test_beat.py @@ -194,7 +194,9 @@ def foo(): foo.apply_async = Mock(name='foo.apply_async') scheduler = mScheduler(app=self.app) - scheduler.apply_async(scheduler.Entry(task=foo.name, app=self.app, args=None, kwargs=None)) + scheduler.apply_async( + scheduler.Entry( + task=foo.name, app=self.app, args=None, kwargs=None)) foo.apply_async.assert_called() def test_apply_async_with_null_args_set_to_none(self): @@ -731,7 +733,8 @@ class test_Service: def get_service(self): Scheduler, mock_shelve = create_persistent_scheduler() - return beat.Service(app=self.app, scheduler_cls=Scheduler), mock_shelve + return beat.Service( + app=self.app, scheduler_cls=Scheduler), mock_shelve def test_pickleable(self): s = beat.Service(app=self.app, scheduler_cls=Mock) From 4cd1fe9bdfcd1ba0016be4e4d441afe10b0d6eb5 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sun, 12 Mar 2023 13:23:05 +0200 Subject: [PATCH 0327/1051] Fixed bug where retrying a task loses its stamps (#8120) * Added unit test test_retry_stamping() * Added test_task_retried_once() and retry_error() task to integration tests * Fixed bug where retrying a task loses its stamps --- celery/app/task.py | 7 ++++++- t/integration/test_canvas.py | 29 +++++++++++++++++++++++++++++ t/unit/tasks/test_stamping.py | 22 ++++++++++++++++++++++ 3 files changed, 57 insertions(+), 1 deletion(-) diff --git a/celery/app/task.py b/celery/app/task.py index 5a12c6df004..71ea2591e64 100644 --- a/celery/app/task.py +++ b/celery/app/task.py @@ -125,7 +125,7 @@ def __repr__(self): def as_execution_options(self): limit_hard, limit_soft = self.timelimit or (None, None) - return { + execution_options = { 'task_id': self.id, 'root_id': 
self.root_id, 'parent_id': self.parent_id, @@ -145,6 +145,11 @@ def as_execution_options(self): 'replaced_task_nesting': self.replaced_task_nesting, 'origin': self.origin, } + if hasattr(self, 'stamps') and hasattr(self, 'stamped_headers'): + if self.stamps is not None and self.stamped_headers is not None: + execution_options['stamps'] = self.stamps + execution_options['stamped_headers'] = self.stamped_headers + return execution_options @property def children(self): diff --git a/t/integration/test_canvas.py b/t/integration/test_canvas.py index 337cbbe6c7f..a582dcef5a8 100644 --- a/t/integration/test_canvas.py +++ b/t/integration/test_canvas.py @@ -3402,3 +3402,32 @@ def on_signature(self, sig, **headers): stamped_fail_sig.stamp(visitor=FixedMonitoringIdStampingVisitor("1234")) stamped_fail_sig.apply_async().get() assert assertion_result + + @flaky + def test_stamps_remain_on_task_retry(self, manager): + @task_received.connect + def task_received_handler(request, **kwargs): + nonlocal assertion_result + + try: + assertion_result = all( + [ + assertion_result, + all([stamped_header in request.stamps for stamped_header in request.stamped_headers]), + request.stamps["stamp"] == 42, + ] + ) + except Exception: + assertion_result = False + + class CustomStampingVisitor(StampingVisitor): + def on_signature(self, sig, **headers) -> dict: + return {"stamp": 42} + + stamped_task = retry_once.si() + stamped_task.stamp(visitor=CustomStampingVisitor()) + assertion_result = True + res = stamped_task.delay() + with pytest.raises(TimeoutError): + res.get(timeout=2) + assert assertion_result diff --git a/t/unit/tasks/test_stamping.py b/t/unit/tasks/test_stamping.py index 51e0e3e92e6..da8492b534b 100644 --- a/t/unit/tasks/test_stamping.py +++ b/t/unit/tasks/test_stamping.py @@ -427,6 +427,20 @@ def xprod(numbers): self.xprod = xprod + @self.app.task(bind=True, max_retries=3, iterations=0, shared=False) + def retry_task(self, arg1, arg2, kwarg=1, max_retries=None, care=True): + self.iterations += 1 + rmax = self.max_retries if max_retries is None else max_retries + + assert repr(self.request) + retries = self.request.retries + if care and retries >= rmax: + return arg1 + else: + raise self.retry(countdown=0, max_retries=rmax) + + self.retry_task = retry_task + @pytest.mark.parametrize( "stamping_visitor", @@ -1221,3 +1235,11 @@ def test_chord_stamping_one_level(self, subtests): with subtests.test("sig_2_res has stamped_headers", stamped_headers=["stamp"]): assert sorted(sig_2_res._get_task_meta()["stamped_headers"]) == sorted(["stamp"]) + + def test_retry_stamping(self): + self.retry_task.push_request() + self.retry_task.request.stamped_headers = ['stamp'] + self.retry_task.request.stamps = {'stamp': 'value'} + sig = self.retry_task.signature_from_request() + assert sig.options['stamped_headers'] == ['stamp'] + assert sig.options['stamps'] == {'stamp': 'value'} From ef7eddaa60c7e234ddf7e77b0ef23b944fa5bc88 Mon Sep 17 00:00:00 2001 From: Maxwell Muoto Date: Mon, 13 Mar 2023 05:16:11 -0500 Subject: [PATCH 0328/1051] Type hints for `celery/schedules.py` (#8114) * Type hints for schedules * Comment * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * small fix * Test fix * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * future import * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * linting fix * linting * re-add docstring * [pre-commit.ci] auto fixes 
from pre-commit.com hooks for more information, see https://pre-commit.ci * Address comments * format * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Asif Saif Uddin --- celery/schedules.py | 131 ++++++++++++++++++++++++-------------------- pyproject.toml | 1 + 2 files changed, 74 insertions(+), 58 deletions(-) diff --git a/celery/schedules.py b/celery/schedules.py index 0f6389c3967..b35436ae74e 100644 --- a/celery/schedules.py +++ b/celery/schedules.py @@ -1,14 +1,17 @@ """Schedules define the intervals at which periodic tasks run.""" +from __future__ import annotations -import numbers import re from bisect import bisect, bisect_left from collections import namedtuple from collections.abc import Iterable -from datetime import datetime, timedelta +from datetime import datetime, timedelta, tzinfo +from typing import Any, Callable, Mapping, Sequence from kombu.utils.objects import cached_property +from celery import Celery + from . import current_app from .utils.collections import AttributeDict from .utils.time import (ffwd, humanize_seconds, localize, maybe_make_aware, maybe_timedelta, remaining, timezone, @@ -49,7 +52,7 @@ """ -def cronfield(s): +def cronfield(s: str) -> str: return '*' if s is None else s @@ -59,44 +62,45 @@ class ParseException(Exception): class BaseSchedule: - def __init__(self, nowfun=None, app=None): + def __init__(self, nowfun: Callable | None = None, app: Celery | None = None): self.nowfun = nowfun self._app = app - def now(self): + def now(self) -> datetime: return (self.nowfun or self.app.now)() - def remaining_estimate(self, last_run_at): + def remaining_estimate(self, last_run_at: datetime) -> timedelta: raise NotImplementedError() - def is_due(self, last_run_at): + def is_due(self, last_run_at: datetime) -> tuple[bool, datetime]: raise NotImplementedError() - def maybe_make_aware(self, dt, naive_as_utc=True): + def maybe_make_aware( + self, dt: datetime, naive_as_utc: bool = True) -> datetime: return maybe_make_aware(dt, self.tz, naive_as_utc=naive_as_utc) @property - def app(self): + def app(self) -> Celery: return self._app or current_app._get_current_object() @app.setter - def app(self, app): + def app(self, app: Celery) -> None: self._app = app @cached_property - def tz(self): + def tz(self) -> tzinfo: return self.app.timezone @cached_property - def utc_enabled(self): + def utc_enabled(self) -> bool: return self.app.conf.enable_utc - def to_local(self, dt): + def to_local(self, dt: datetime) -> datetime: if not self.utc_enabled: return timezone.to_local_fallback(dt) return dt - def __eq__(self, other): + def __eq__(self, other: Any) -> bool: if isinstance(other, BaseSchedule): return other.nowfun == self.nowfun return NotImplemented @@ -114,20 +118,22 @@ class schedule(BaseSchedule): app (Celery): Celery app instance. 
""" - relative = False + relative: bool = False - def __init__(self, run_every=None, relative=False, nowfun=None, app=None): + def __init__(self, run_every: float | timedelta | None = None, + relative: bool = False, nowfun: Callable | None = None, app: Celery + | None = None) -> None: self.run_every = maybe_timedelta(run_every) self.relative = relative super().__init__(nowfun=nowfun, app=app) - def remaining_estimate(self, last_run_at): + def remaining_estimate(self, last_run_at: datetime) -> timedelta: return remaining( self.maybe_make_aware(last_run_at), self.run_every, self.maybe_make_aware(self.now()), self.relative, ) - def is_due(self, last_run_at): + def is_due(self, last_run_at: datetime) -> tuple[bool, datetime]: """Return tuple of ``(is_due, next_time_to_check)``. Notes: @@ -164,23 +170,24 @@ def is_due(self, last_run_at): return schedstate(is_due=True, next=self.seconds) return schedstate(is_due=False, next=remaining_s) - def __repr__(self): + def __repr__(self) -> str: return f'' - def __eq__(self, other): + def __eq__(self, other: Any) -> bool: if isinstance(other, schedule): return self.run_every == other.run_every return self.run_every == other - def __reduce__(self): + def __reduce__(self) -> tuple[type, + tuple[timedelta, bool, Callable | None]]: return self.__class__, (self.run_every, self.relative, self.nowfun) @property - def seconds(self): + def seconds(self) -> int | float: return max(self.run_every.total_seconds(), 0) @property - def human_seconds(self): + def human_seconds(self) -> str: return humanize_seconds(self.seconds) @@ -238,17 +245,17 @@ class crontab_parser: _steps = r'/(\w+)?' _star = r'\*' - def __init__(self, max_=60, min_=0): + def __init__(self, max_: int = 60, min_: int = 0): self.max_ = max_ self.min_ = min_ - self.pats = ( + self.pats: tuple[tuple[re.Pattern, Callable], ...] 
= ( (re.compile(self._range + self._steps), self._range_steps), (re.compile(self._range), self._expand_range), (re.compile(self._star + self._steps), self._star_steps), (re.compile('^' + self._star + '$'), self._expand_star), ) - def parse(self, spec): + def parse(self, spec: str) -> set[int]: acc = set() for part in spec.split(','): if not part: @@ -256,14 +263,14 @@ def parse(self, spec): acc |= set(self._parse_part(part)) return acc - def _parse_part(self, part): + def _parse_part(self, part: str) -> list[int]: for regex, handler in self.pats: m = regex.match(part) if m: return handler(m.groups()) return self._expand_range((part,)) - def _expand_range(self, toks): + def _expand_range(self, toks: Sequence[str]) -> list[int]: fr = self._expand_number(toks[0]) if len(toks) > 1: to = self._expand_number(toks[1]) @@ -273,20 +280,20 @@ def _expand_range(self, toks): return list(range(fr, to + 1)) return [fr] - def _range_steps(self, toks): + def _range_steps(self, toks: Sequence[str]) -> list[int]: if len(toks) != 3 or not toks[2]: raise self.ParseException('empty filter') return self._expand_range(toks[:2])[::int(toks[2])] - def _star_steps(self, toks): + def _star_steps(self, toks: Sequence[str]) -> list[int]: if not toks or not toks[0]: raise self.ParseException('empty filter') return self._expand_star()[::int(toks[0])] - def _expand_star(self, *args): + def _expand_star(self, *args: Any) -> list[int]: return list(range(self.min_, self.max_ + self.min_)) - def _expand_number(self, s): + def _expand_number(self, s: str) -> int: if isinstance(s, str) and s[0] == '-': raise self.ParseException('negative numbers not supported') try: @@ -386,8 +393,8 @@ class crontab(BaseSchedule): present in ``month_of_year``. """ - def __init__(self, minute='*', hour='*', day_of_week='*', - day_of_month='*', month_of_year='*', **kwargs): + def __init__(self, minute: str = '*', hour: str = '*', day_of_week: str = '*', + day_of_month: str = '*', month_of_year: str = '*', **kwargs: Any) -> None: self._orig_minute = cronfield(minute) self._orig_hour = cronfield(hour) self._orig_day_of_week = cronfield(day_of_week) @@ -402,7 +409,9 @@ def __init__(self, minute='*', hour='*', day_of_week='*', super().__init__(**kwargs) @staticmethod - def _expand_cronspec(cronspec, max_, min_=0): + def _expand_cronspec( + cronspec: int | str | Iterable, + max_: int, min_: int = 0) -> set[Any]: """Expand cron specification. Takes the given cronspec argument in one of the forms: @@ -428,14 +437,14 @@ def _expand_cronspec(cronspec, max_, min_=0): day of month or month of year. The default is sufficient for minute, hour, and day of week. """ - if isinstance(cronspec, numbers.Integral): + if isinstance(cronspec, int): result = {cronspec} elif isinstance(cronspec, str): result = crontab_parser(max_, min_).parse(cronspec) elif isinstance(cronspec, set): result = cronspec elif isinstance(cronspec, Iterable): - result = set(cronspec) + result = set(cronspec) # type: ignore else: raise TypeError(CRON_INVALID_TYPE.format(type=type(cronspec))) @@ -446,7 +455,8 @@ def _expand_cronspec(cronspec, max_, min_=0): min=min_, max=max_ - 1 + min_, value=number)) return result - def _delta_to_next(self, last_run_at, next_hour, next_minute): + def _delta_to_next(self, last_run_at: datetime, next_hour: int, + next_minute: int) -> ffwd: """Find next delta. 
Takes a :class:`~datetime.datetime` of last run, next minute and hour, @@ -460,19 +470,19 @@ def _delta_to_next(self, last_run_at, next_hour, next_minute): days_of_month = sorted(self.day_of_month) months_of_year = sorted(self.month_of_year) - def day_out_of_range(year, month, day): + def day_out_of_range(year: int, month: int, day: int) -> bool: try: datetime(year=year, month=month, day=day) except ValueError: return True return False - def is_before_last_run(year, month, day): + def is_before_last_run(year: int, month: int, day: int) -> bool: return self.maybe_make_aware( datetime(year, month, day, next_hour, next_minute), naive_as_utc=False) < last_run_at - def roll_over(): + def roll_over() -> None: for _ in range(2000): flag = (datedata.dom == len(days_of_month) or day_out_of_range(datedata.year, @@ -522,22 +532,23 @@ def roll_over(): second=0, microsecond=0) - def __repr__(self): + def __repr__(self) -> str: return CRON_REPR.format(self) - def __reduce__(self): + def __reduce__(self) -> tuple[type, tuple[str, str, str, str, str], Any]: return (self.__class__, (self._orig_minute, self._orig_hour, self._orig_day_of_week, self._orig_day_of_month, self._orig_month_of_year), self._orig_kwargs) - def __setstate__(self, state): + def __setstate__(self, state: Mapping[str, Any]) -> None: # Calling super's init because the kwargs aren't necessarily passed in # the same form as they are stored by the superclass super().__init__(**state) - def remaining_delta(self, last_run_at, tz=None, ffwd=ffwd): + def remaining_delta(self, last_run_at: datetime, tz: tzinfo | None = None, + ffwd: type = ffwd) -> tuple[datetime, Any, datetime]: # caching global ffwd last_run_at = self.maybe_make_aware(last_run_at) now = self.maybe_make_aware(self.now()) @@ -594,7 +605,8 @@ def remaining_delta(self, last_run_at, tz=None, ffwd=ffwd): next_hour, next_minute) return self.to_local(last_run_at), delta, self.to_local(now) - def remaining_estimate(self, last_run_at, ffwd=ffwd): + def remaining_estimate( + self, last_run_at: datetime, ffwd: type = ffwd) -> timedelta: """Estimate of next run time. Returns when the periodic task should run next as a @@ -604,7 +616,7 @@ def remaining_estimate(self, last_run_at, ffwd=ffwd): # caching global ffwd return remaining(*self.remaining_delta(last_run_at, ffwd=ffwd)) - def is_due(self, last_run_at): + def is_due(self, last_run_at: datetime) -> tuple[bool, datetime]: """Return tuple of ``(is_due, next_time_to_run)``. If :setting:`beat_cron_starting_deadline` has been specified, the @@ -653,7 +665,7 @@ def is_due(self, last_run_at): rem = max(rem_delta.total_seconds(), 0) return schedstate(due, rem) - def __eq__(self, other): + def __eq__(self, other: Any) -> bool: if isinstance(other, crontab): return ( other.month_of_year == self.month_of_year and @@ -666,10 +678,12 @@ def __eq__(self, other): return NotImplemented -def maybe_schedule(s, relative=False, app=None): +def maybe_schedule( + s: int | float | timedelta | BaseSchedule, relative: bool = False, + app: Celery | None = None) -> float | timedelta | BaseSchedule: """Return schedule from number, timedelta, or actual schedule.""" if s is not None: - if isinstance(s, numbers.Number): + if isinstance(s, (float, int)): s = timedelta(seconds=s) if isinstance(s, timedelta): return schedule(s, relative, app=app) @@ -701,8 +715,8 @@ class solar(BaseSchedule): Arguments: event (str): Solar event that triggers this task. See note for available values. - lat (int): The latitude of the observer. - lon (int): The longitude of the observer. 
+ lat (float): The latitude of the observer. + lon (float): The longitude of the observer. nowfun (Callable): Function returning the current date and time as a class:`~datetime.datetime`. app (Celery): Celery app instance. @@ -753,7 +767,8 @@ class solar(BaseSchedule): 'dusk_astronomical': True, } - def __init__(self, event, lat, lon, **kwargs): + def __init__(self, event: str, lat: int | float, lon: int | float, ** + kwargs: Any) -> None: self.ephem = __import__('ephem') self.event = event self.lat = lat @@ -780,15 +795,15 @@ def __init__(self, event, lat, lon, **kwargs): self.method = self._methods[event] self.use_center = self._use_center_l[event] - def __reduce__(self): + def __reduce__(self) -> tuple[type, tuple[str, int | float, int | float]]: return self.__class__, (self.event, self.lat, self.lon) - def __repr__(self): + def __repr__(self) -> str: return ''.format( self.event, self.lat, self.lon, ) - def remaining_estimate(self, last_run_at): + def remaining_estimate(self, last_run_at: datetime) -> timedelta: """Return estimate of next time to run. Returns: @@ -823,7 +838,7 @@ def remaining_estimate(self, last_run_at): delta = next - now return delta - def is_due(self, last_run_at): + def is_due(self, last_run_at: datetime) -> tuple[bool, datetime]: """Return tuple of ``(is_due, next_time_to_run)``. Note: @@ -840,7 +855,7 @@ def is_due(self, last_run_at): rem = max(rem_delta.total_seconds(), 0) return schedstate(due, rem) - def __eq__(self, other): + def __eq__(self, other: Any) -> bool: if isinstance(other, solar): return ( other.event == self.event and diff --git a/pyproject.toml b/pyproject.toml index 722fd58a68d..e4d3f6fd838 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -20,6 +20,7 @@ files = [ "celery/concurrency/thread.py", "celery/security/certificate.py", "celery/utils/text.py", + "celery/schedules.py", "celery/apps/beat.py", ] From 009f61bb12725aba81586b7d1c1c5a323e07a142 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 13 Mar 2023 17:32:56 +0000 Subject: [PATCH 0329/1051] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/mirrors-mypy: v1.0.1 → v1.1.1](https://github.com/pre-commit/mirrors-mypy/compare/v1.0.1...v1.1.1) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index d77e0c99509..bcf37dbaec7 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -29,7 +29,7 @@ repos: - id: isort - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.0.1 + rev: v1.1.1 hooks: - id: mypy pass_filenames: false From 9bee04140b2b9709c00b1c201feb87505bce2b5a Mon Sep 17 00:00:00 2001 From: marselester Date: Fri, 1 Jul 2022 23:22:06 -0400 Subject: [PATCH 0330/1051] Add a link to Gopher Celery --- README.rst | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/README.rst b/README.rst index dd34974d16e..fe84259088f 100644 --- a/README.rst +++ b/README.rst @@ -44,7 +44,7 @@ to high availability and horizontal scaling. Celery is written in Python, but the protocol can be implemented in any language. In addition to Python there's node-celery_ for Node.js, -a `PHP client`_, `gocelery`_ for golang, and rusty-celery_ for Rust. +a `PHP client`_, `gocelery`_, gopher-celery_ for Go, and rusty-celery_ for Rust. 
Language interoperability can also be achieved by using webhooks in such a way that the client enqueues an URL to be requested by a worker. @@ -52,6 +52,7 @@ in such a way that the client enqueues an URL to be requested by a worker. .. _node-celery: https://github.com/mher/node-celery .. _`PHP client`: https://github.com/gjedeer/celery-php .. _`gocelery`: https://github.com/gocelery/gocelery +.. _gopher-celery: https://github.com/marselester/gopher-celery .. _rusty-celery: https://github.com/rusty-celery/rusty-celery What do I need? From 3d9e1c7681fb64649583c816eb3e01b6996f793b Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Sun, 19 Mar 2023 13:43:36 +0600 Subject: [PATCH 0331/1051] Update sqlalchemy.txt --- requirements/extras/sqlalchemy.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/sqlalchemy.txt b/requirements/extras/sqlalchemy.txt index 8e2b106495c..1191b6925c4 100644 --- a/requirements/extras/sqlalchemy.txt +++ b/requirements/extras/sqlalchemy.txt @@ -1 +1 @@ -sqlalchemy==1.4.45 +sqlalchemy>=1.4.47,<2.0 From 3d143a99bf0c4968ed5370fba30b4cc37ab41c9f Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Sun, 19 Mar 2023 15:10:04 +0600 Subject: [PATCH 0332/1051] azure-storage-blob 12.15.0 --- requirements/extras/azureblockblob.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/azureblockblob.txt b/requirements/extras/azureblockblob.txt index a0088f759cb..f8329f38c8d 100644 --- a/requirements/extras/azureblockblob.txt +++ b/requirements/extras/azureblockblob.txt @@ -1 +1 @@ -azure-storage-blob>=12.11.0 +azure-storage-blob>=12.15.0 From a80da3965fefcf9c7638c0a264314cd194a71d1f Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Mon, 20 Mar 2023 18:29:36 +0600 Subject: [PATCH 0333/1051] test kombu 5.3.0b3 (#8138) --- requirements/default.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/default.txt b/requirements/default.txt index f159c7bce7f..f03f9b8e03a 100644 --- a/requirements/default.txt +++ b/requirements/default.txt @@ -1,6 +1,6 @@ pytz>=2021.3 billiard>=4.1.0,<5.0 -kombu>=5.3.0b2,<6.0 +kombu>=5.3.0b3,<6.0 vine>=5.0.0,<6.0 click>=8.1.2,<9.0 click-didyoumean>=0.3.0 From 347553f926e2304969e1d29d039c12059cbf1465 Mon Sep 17 00:00:00 2001 From: Bidaya0 Date: Thu, 23 Mar 2023 16:17:33 +0800 Subject: [PATCH 0334/1051] fix: add expire string parse. (#8134) * fix: add expire string parse. * Update base.py * Update base.py * test: add expires string test. * lint: fix lint error. * fix: fix if-else condition. 
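As a minimal sketch of the behavior this change enables (the app name, broker URL, task name, and timestamp below are placeholders for illustration, not part of the patch):

.. code-block:: python

    from datetime import datetime, timedelta, timezone

    from celery import Celery

    app = Celery('proj', broker='amqp://localhost')  # hypothetical app/broker

    # `expires` already accepted a datetime or a number of seconds; with
    # this patch an ISO-8601 string is also parsed, via datetime.fromisoformat().
    deadline = (datetime.now(timezone.utc) + timedelta(minutes=5)).isoformat()

    app.send_task('proj.add', args=(2, 2), expires=deadline)   # ISO-8601 string
    app.send_task('proj.add', args=(2, 2), expires=300)        # seconds from now
    app.send_task('proj.add', args=(2, 2),
                  expires=datetime.now(timezone.utc) + timedelta(minutes=5))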
--- celery/app/base.py | 3 +++ t/unit/app/test_app.py | 6 ++++++ 2 files changed, 9 insertions(+) diff --git a/celery/app/base.py b/celery/app/base.py index 3f8b2ec0a70..1dfed239b36 100644 --- a/celery/app/base.py +++ b/celery/app/base.py @@ -738,6 +738,9 @@ def send_task(self, name, args=None, kwargs=None, countdown=None, if isinstance(expires, datetime): expires_s = (maybe_make_aware( expires) - self.now()).total_seconds() + elif isinstance(expires, str): + expires_s = (maybe_make_aware( + datetime.fromisoformat(expires)) - self.now()).total_seconds() else: expires_s = expires diff --git a/t/unit/app/test_app.py b/t/unit/app/test_app.py index 664a0ea6b7c..98ade750713 100644 --- a/t/unit/app/test_app.py +++ b/t/unit/app/test_app.py @@ -1067,6 +1067,12 @@ def test_thread_backend_thread_safe(self): assert isinstance(thread_backend, Backend) assert main_backend is thread_backend + def test_send_task_expire_as_string(self): + try: + self.app.send_task('foo', (1, 2), expires='2023-03-16T17:21:20.663973') + except TypeError as e: + pytest.fail(f'raised unexpected error {e}') + class test_defaults: From ab34d34fecf0becc8f2b578fe769eefb74110ace Mon Sep 17 00:00:00 2001 From: Alessio Bogon <778703+youtux@users.noreply.github.com> Date: Thu, 23 Mar 2023 09:26:55 +0100 Subject: [PATCH 0335/1051] Fix worker crash on un-pickleable exceptions (#8133) * Fix worker crash on unpickleable exceptions * Move logic to wrap unpickleable exception into the Retry class (revert modifications to handle_retry) * Add test and fix handle_ignore not representing the wrapped exception correctly --------- Co-authored-by: Alessio Bogon --- celery/app/trace.py | 2 +- celery/exceptions.py | 4 +++- celery/utils/serialization.py | 4 +++- t/unit/tasks/test_tasks.py | 30 ++++++++++++++++++++++++++++++ 4 files changed, 37 insertions(+), 3 deletions(-) diff --git a/celery/app/trace.py b/celery/app/trace.py index 058b8997eef..96d35c829f9 100644 --- a/celery/app/trace.py +++ b/celery/app/trace.py @@ -219,7 +219,7 @@ def handle_failure(self, task, req, store_errors=True, call_errbacks=True): exc = self.retval # make sure we only send pickleable exceptions back to parent. 
einfo = ExceptionInfo() - einfo.exception = get_pickleable_exception(einfo.exception) + einfo.exception.exc = get_pickleable_exception(einfo.exception.exc) einfo.type = get_pickleable_etype(einfo.type) task.backend.mark_as_failure( diff --git a/celery/exceptions.py b/celery/exceptions.py index 9b6129c19cd..3203e9f49ea 100644 --- a/celery/exceptions.py +++ b/celery/exceptions.py @@ -96,6 +96,8 @@ 'CeleryCommandException', ) +from celery.utils.serialization import get_pickleable_exception + UNREGISTERED_FMT = """\ Task of kind {0} never registered, please make sure it's imported.\ """ @@ -160,7 +162,7 @@ def __init__(self, message=None, exc=None, when=None, is_eager=False, if isinstance(exc, str): self.exc, self.excs = None, exc else: - self.exc, self.excs = exc, safe_repr(exc) if exc else None + self.exc, self.excs = get_pickleable_exception(exc), safe_repr(exc) if exc else None self.when = when self.is_eager = is_eager self.sig = sig diff --git a/celery/utils/serialization.py b/celery/utils/serialization.py index c03a20f9419..12fc9dabd00 100644 --- a/celery/utils/serialization.py +++ b/celery/utils/serialization.py @@ -128,7 +128,9 @@ class UnpickleableExceptionWrapper(Exception): exc_args = None def __init__(self, exc_module, exc_cls_name, exc_args, text=None): - safe_exc_args = ensure_serializable(exc_args, pickle.dumps) + safe_exc_args = ensure_serializable( + exc_args, lambda v: pickle.loads(pickle.dumps(v)) + ) self.exc_module = exc_module self.exc_cls_name = exc_cls_name self.exc_args = safe_exc_args diff --git a/t/unit/tasks/test_tasks.py b/t/unit/tasks/test_tasks.py index 0095bac3405..f5d6a4ec9fc 100644 --- a/t/unit/tasks/test_tasks.py +++ b/t/unit/tasks/test_tasks.py @@ -13,6 +13,7 @@ from celery.contrib.testing.mocks import ContextMock from celery.exceptions import Ignore, ImproperlyConfigured, Retry from celery.result import AsyncResult, EagerResult +from celery.utils.serialization import UnpickleableExceptionWrapper try: from urllib.error import HTTPError @@ -215,6 +216,13 @@ def retry_task_customexc(self, arg1, arg2, kwarg=1, **kwargs): self.retry_task_customexc = retry_task_customexc + @self.app.task(bind=True, max_retries=3, iterations=0, shared=False) + def retry_task_unpickleable_exc(self, foo, bar): + self.iterations += 1 + raise self.retry(countdown=0, exc=UnpickleableException(foo, bar)) + + self.retry_task_unpickleable_exc = retry_task_unpickleable_exc + @self.app.task(bind=True, autoretry_for=(ZeroDivisionError,), shared=False) def autoretry_task_no_kwargs(self, a, b): @@ -389,6 +397,13 @@ class MyCustomException(Exception): """Random custom exception.""" +class UnpickleableException(Exception): + """Exception that doesn't survive a pickling roundtrip (dump + load).""" + def __init__(self, foo, bar): + super().__init__(foo) + self.bar = bar + + class test_task_retries(TasksCase): def test_retry(self): @@ -540,6 +555,21 @@ def test_retry_with_custom_exception(self): result.get() assert self.retry_task_customexc.iterations == 3 + def test_retry_with_unpickleable_exception(self): + self.retry_task_unpickleable_exc.max_retries = 2 + self.retry_task_unpickleable_exc.iterations = 0 + + result = self.retry_task_unpickleable_exc.apply( + ["foo", "bar"] + ) + with pytest.raises(UnpickleableExceptionWrapper) as exc_info: + result.get() + + assert self.retry_task_unpickleable_exc.iterations == 3 + + exc_wrapper = exc_info.value + assert exc_wrapper.exc_args == ("foo", ) + def test_max_retries_exceeded(self): self.retry_task.max_retries = 2 self.retry_task.iterations = 0 From 
c571848023be732a1a11d46198cf831a522cfb54 Mon Sep 17 00:00:00 2001 From: woutdenolf Date: Thu, 23 Mar 2023 15:26:02 +0100 Subject: [PATCH 0336/1051] avoid text rewrapping by click (#8152) --- celery/bin/worker.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/celery/bin/worker.py b/celery/bin/worker.py index 6a4b5533692..9dd1582030e 100644 --- a/celery/bin/worker.py +++ b/celery/bin/worker.py @@ -300,8 +300,11 @@ def worker(ctx, hostname=None, pool_cls=None, app=None, uid=None, gid=None, **kwargs): """Start worker instance. + \b Examples -------- + + \b $ celery --app=proj worker -l INFO $ celery -A proj worker -l INFO -Q hipri,lopri $ celery -A proj worker --concurrency=4 From 745194999a70f1bb8ae8ba70daeb407f95b01b5c Mon Sep 17 00:00:00 2001 From: Iuri de Silvio Date: Thu, 23 Mar 2023 14:21:24 -0300 Subject: [PATCH 0337/1051] Warn when an unnamed periodic task overrides another one. (#8143) * Warn when an unnamed periodic task overrides another one. * Docs. --- celery/app/base.py | 22 ++++++++++++++++++---- docs/userguide/periodic-tasks.rst | 5 +++++ t/unit/app/test_app.py | 27 +++++++++++++++++++++++++++ 3 files changed, 50 insertions(+), 4 deletions(-) diff --git a/celery/app/base.py b/celery/app/base.py index 1dfed239b36..cfd71c627fb 100644 --- a/celery/app/base.py +++ b/celery/app/base.py @@ -1006,7 +1006,8 @@ def _load_config(self): # load lazy periodic tasks pending_beat = self._pending_periodic_tasks while pending_beat: - self._add_periodic_task(*pending_beat.popleft()) + periodic_task_args, periodic_task_kwargs = pending_beat.popleft() + self._add_periodic_task(*periodic_task_args, **periodic_task_kwargs) self.on_after_configure.send(sender=self, source=self._conf) return self._conf @@ -1026,12 +1027,19 @@ def signature(self, *args, **kwargs): def add_periodic_task(self, schedule, sig, args=(), kwargs=(), name=None, **opts): + """ + Add a periodic task to beat schedule. + + Celery beat stores tasks based on `sig` or `name` if provided. Adding the + same signature twice makes the second task override the first one. To + avoid the override, use a distinct `name` for each. + """ key, entry = self._sig_to_periodic_task_entry( schedule, sig, args, kwargs, name, **opts) if self.configured: - self._add_periodic_task(key, entry) + self._add_periodic_task(key, entry, name=name) else: - self._pending_periodic_tasks.append((key, entry)) + self._pending_periodic_tasks.append([(key, entry), {"name": name}]) return key def _sig_to_periodic_task_entry(self, schedule, sig, @@ -1048,7 +1056,13 @@ def _sig_to_periodic_task_entry(self, schedule, sig, 'options': dict(sig.options, **opts), } - def _add_periodic_task(self, key, entry): + def _add_periodic_task(self, key, entry, name=None): + if name is None and key in self._conf.beat_schedule: + logger.warning( + f"Periodic task key='{key}' shadowed a previous unnamed periodic task." + " Pass a name kwarg to add_periodic_task to silence this warning." + ) + self._conf.beat_schedule[key] = entry def create_task_cls(self): diff --git a/docs/userguide/periodic-tasks.rst b/docs/userguide/periodic-tasks.rst index 089135273bd..b55799d2fe6 100644 --- a/docs/userguide/periodic-tasks.rst +++ b/docs/userguide/periodic-tasks.rst @@ -94,6 +94,11 @@ beat schedule list. # Calls test('hello') every 10 seconds. sender.add_periodic_task(10.0, test.s('hello'), name='add every 10') + # Calls test('hello') every 30 seconds. # It uses the same signature as the previous task; an explicit name is # defined to avoid this task replacing the previous one. 
+ sender.add_periodic_task(30.0, test.s('hello'), name='add every 30') + # Calls test('world') every 30 seconds sender.add_periodic_task(30.0, test.s('world'), expires=10) diff --git a/t/unit/app/test_app.py b/t/unit/app/test_app.py index 98ade750713..c63ff17c16d 100644 --- a/t/unit/app/test_app.py +++ b/t/unit/app/test_app.py @@ -916,6 +916,33 @@ def add(x, y): assert 'add1' in self.app.conf.beat_schedule assert 'add2' in self.app.conf.beat_schedule + def test_add_periodic_task_expected_override(self): + + @self.app.task + def add(x, y): + pass + sig = add.s(2, 2) + self.app.add_periodic_task(10, sig, name='add1', expires=3) + self.app.add_periodic_task(20, sig, name='add1', expires=3) + assert 'add1' in self.app.conf.beat_schedule + assert len(self.app.conf.beat_schedule) == 1 + + def test_add_periodic_task_unexpected_override(self, caplog): + + @self.app.task + def add(x, y): + pass + sig = add.s(2, 2) + self.app.add_periodic_task(10, sig, expires=3) + self.app.add_periodic_task(20, sig, expires=3) + + assert len(self.app.conf.beat_schedule) == 1 + assert caplog.records[0].message == ( + "Periodic task key='t.unit.app.test_app.add(2, 2)' shadowed a" + " previous unnamed periodic task. Pass a name kwarg to" + " add_periodic_task to silence this warning." + ) + @pytest.mark.masked_modules('multiprocessing.util') def test_pool_no_multiprocessing(self, mask_modules): pool = self.app.pool From 3ce5b85806104e14f75a377fadc4de3e50038396 Mon Sep 17 00:00:00 2001 From: Alessio Bogon <778703+youtux@users.noreply.github.com> Date: Fri, 24 Mar 2023 07:21:45 +0100 Subject: [PATCH 0338/1051] Fix `Task.handle_ignore` not wrapping exceptions properly (#8149) * * Fix Task.handle_failure not wrapping the exception correctly when unpickleable. * Add tests * Access the value only once * Add docstrings * `orig_exc.__traceback__` may be None, fallback to sys.exc_info() This can happen when `get_pickleable_exception` behaviour decides to give back a different exception * Fix `from_exception` when the object is not an Exception * Fix test --- celery/app/trace.py | 51 ++++++++++++++------------- celery/utils/serialization.py | 13 ++++--- t/integration/tasks.py | 25 +++++++++++++ t/integration/test_tasks.py | 66 ++++++++++++++++++++++++++++++++--- t/unit/tasks/test_tasks.py | 1 + 5 files changed, 123 insertions(+), 33 deletions(-) diff --git a/celery/app/trace.py b/celery/app/trace.py index 96d35c829f9..df949ce2cdb 100644 --- a/celery/app/trace.py +++ b/celery/app/trace.py @@ -214,30 +214,33 @@ def handle_retry(self, task, req, store_errors=True, **kwargs): def handle_failure(self, task, req, store_errors=True, call_errbacks=True): """Handle exception.""" - _, _, tb = sys.exc_info() - try: - exc = self.retval - # make sure we only send pickleable exceptions back to parent. - einfo = ExceptionInfo() - einfo.exception.exc = get_pickleable_exception(einfo.exception.exc) - einfo.type = get_pickleable_etype(einfo.type) - - task.backend.mark_as_failure( - req.id, exc, einfo.traceback, - request=req, store_result=store_errors, - call_errbacks=call_errbacks, - ) - - task.on_failure(exc, req.id, req.args, req.kwargs, einfo) - signals.task_failure.send(sender=task, task_id=req.id, - exception=exc, args=req.args, - kwargs=req.kwargs, - traceback=tb, - einfo=einfo) - self._log_error(task, req, einfo) - return einfo - finally: - del tb + orig_exc = self.retval + + exc = get_pickleable_exception(orig_exc) + if exc.__traceback__ is None: + # `get_pickleable_exception` may have created a new exception without + # a traceback. 
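To make the conversion above concrete, here is a minimal sketch of how ``get_pickleable_exception`` degrades an unpickleable exception into an ``UnpickleableExceptionWrapper``; the ``Unpickleable`` class is a hypothetical stand-in, not part of the patch:

.. code-block:: python

    from celery.utils.serialization import (
        UnpickleableExceptionWrapper, get_pickleable_exception)

    class Unpickleable(Exception):
        # bar never ends up in self.args, so pickle cannot rebuild
        # the instance by calling Unpickleable(*args) at load time.
        def __init__(self, foo, bar):
            super().__init__(foo)
            self.bar = bar

    safe = get_pickleable_exception(Unpickleable("foo", "bar"))
    assert isinstance(safe, UnpickleableExceptionWrapper)
    assert safe.exc_cls_name == "Unpickleable"
    assert safe.exc_args == ("foo",)  # only the serializable parts survive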
+ _, _, exc.__traceback__ = sys.exc_info() + + exc_type = get_pickleable_etype(orig_exc) + + # make sure we only send pickleable exceptions back to parent. + einfo = ExceptionInfo(exc_info=(exc_type, exc, exc.__traceback__)) + + task.backend.mark_as_failure( + req.id, exc, einfo.traceback, + request=req, store_result=store_errors, + call_errbacks=call_errbacks, + ) + + task.on_failure(exc, req.id, req.args, req.kwargs, einfo) + signals.task_failure.send(sender=task, task_id=req.id, + exception=exc, args=req.args, + kwargs=req.kwargs, + traceback=exc.__traceback__, + einfo=einfo) + self._log_error(task, req, einfo) + return einfo def _log_error(self, task, req, einfo): eobj = einfo.exception = get_pickled_exception(einfo.exception) diff --git a/celery/utils/serialization.py b/celery/utils/serialization.py index 12fc9dabd00..6c6b3b76f94 100644 --- a/celery/utils/serialization.py +++ b/celery/utils/serialization.py @@ -147,10 +147,15 @@ def __str__(self): @classmethod def from_exception(cls, exc): - return cls(exc.__class__.__module__, - exc.__class__.__name__, - getattr(exc, 'args', []), - safe_repr(exc)) + res = cls( + exc.__class__.__module__, + exc.__class__.__name__, + getattr(exc, 'args', []), + safe_repr(exc) + ) + if hasattr(exc, "__traceback__"): + res = res.with_traceback(exc.__traceback__) + return res def get_pickleable_exception(exc): diff --git a/t/integration/tasks.py b/t/integration/tasks.py index dac9455c38e..24dedbce29c 100644 --- a/t/integration/tasks.py +++ b/t/integration/tasks.py @@ -211,6 +211,12 @@ def retry(self, return_value=None): raise self.retry(exc=ExpectedException(), countdown=5) +@shared_task(bind=True, default_retry_delay=1) +def retry_unpickleable(self, foo, bar, *, retry_kwargs): + """Task that fails with an unpickleable exception and is retried.""" + raise self.retry(exc=UnpickleableException(foo, bar), **retry_kwargs) + + @shared_task(bind=True, expires=120.0, max_retries=1) def retry_once(self, *args, expires=None, max_retries=1, countdown=0.1): """Task that fails and is retried. Returns the number of retries.""" @@ -319,6 +325,19 @@ def __hash__(self): return hash(self.args) +class UnpickleableException(Exception): + """Exception that doesn't survive a pickling roundtrip (dump + load).""" + def __init__(self, foo, bar=None): + if bar is None: + # We define bar with a default value in the signature so that + # it's easier to add a break point here to find out when the + # exception is being unpickled. 
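The dump-succeeds/load-fails behaviour that this test class simulates can be reproduced without Celery at all:

.. code-block:: python

    import pickle

    class Unpickleable(Exception):
        def __init__(self, foo, bar):
            super().__init__(foo)  # only foo is recorded in self.args
            self.bar = bar

    payload = pickle.dumps(Unpickleable("foo", "bar"))  # dumping works
    try:
        pickle.loads(payload)  # reconstruction calls Unpickleable("foo")
    except TypeError as exc:
        print("roundtrip failed:", exc)  # missing required argument: bar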
+ raise TypeError("bar must be provided") + + super().__init__(foo) + self.bar = bar + + @shared_task def fail(*args): """Task that simply raises ExpectedException.""" @@ -326,6 +345,12 @@ def fail(*args): raise ExpectedException(*args) +@shared_task() +def fail_unpickleable(foo, bar): + """Task that raises an unpickleable exception.""" + raise UnpickleableException(foo, bar) + + @shared_task(bind=True) def fail_replaced(self, *args): """Replace this task with one which raises ExpectedException.""" diff --git a/t/integration/test_tasks.py b/t/integration/test_tasks.py index 5eea4d88e9e..52b56be92f3 100644 --- a/t/integration/test_tasks.py +++ b/t/integration/test_tasks.py @@ -1,3 +1,4 @@ +import time from datetime import datetime, timedelta from time import perf_counter, sleep from uuid import uuid4 @@ -7,12 +8,13 @@ import celery from celery import chain, chord, group from celery.canvas import StampingVisitor +from celery.utils.serialization import UnpickleableExceptionWrapper from celery.worker import state as worker_state from .conftest import get_active_redis_channels from .tasks import (ClassBasedAutoRetryTask, ExpectedException, add, add_ignore_result, add_not_typed, fail, - print_unicode, retry, retry_once, retry_once_headers, retry_once_priority, return_properties, - sleeping) + fail_unpickleable, print_unicode, retry, retry_once, retry_once_headers, retry_once_priority, + retry_unpickleable, return_properties, sleeping) TIMEOUT = 10 @@ -327,16 +329,19 @@ def test_wrong_arguments(self, manager): result.get(timeout=5) assert result.status == 'FAILURE' - @pytest.mark.xfail(reason="Retry failed on rpc backend", strict=False) def test_retry(self, manager): """Tests retrying of task.""" # Tests when max. retries is reached result = retry.delay() - for _ in range(5): + + tik = time.monotonic() + while time.monotonic() < tik + 5: status = result.status if status != 'PENDING': break - sleep(1) + sleep(0.1) + else: + raise AssertionError("Timeout while waiting for the task to be retried") assert status == 'RETRY' with pytest.raises(ExpectedException): result.get() @@ -353,6 +358,57 @@ def test_retry(self, manager): assert result.get() == 'bar' assert result.status == 'SUCCESS' + def test_retry_with_unpickleable_exception(self, manager): + """Test a task that retries with an unpickleable exception. + + We expect to be able to fetch the result (exception) correctly. + """ + + job = retry_unpickleable.delay( + "foo", + "bar", + retry_kwargs={"countdown": 10, "max_retries": 1}, + ) + + # Wait for the task to raise the Retry exception + tik = time.monotonic() + while time.monotonic() < tik + 5: + status = job.status + if status != 'PENDING': + break + sleep(0.1) + else: + raise AssertionError("Timeout while waiting for the task to be retried") + + assert status == 'RETRY' + + # Get the exception + res = job.result + assert job.status == 'RETRY' # make sure that it wasn't completed yet + + # Check it + assert isinstance(res, UnpickleableExceptionWrapper) + assert res.exc_cls_name == "UnpickleableException" + assert res.exc_args == ("foo",) + + job.revoke() + + def test_fail_with_unpickleable_exception(self, manager): + """Test a task that fails with an unpickleable exception. + + We expect to be able to fetch the result (exception) correctly. 
+ """ + result = fail_unpickleable.delay("foo", "bar") + + with pytest.raises(UnpickleableExceptionWrapper) as exc_info: + result.get() + + exc_wrapper = exc_info.value + assert exc_wrapper.exc_cls_name == "UnpickleableException" + assert exc_wrapper.exc_args == ("foo",) + + assert result.status == 'FAILURE' + @flaky def test_task_accepted(self, manager, sleep=1): r1 = sleeping.delay(sleep) diff --git a/t/unit/tasks/test_tasks.py b/t/unit/tasks/test_tasks.py index f5d6a4ec9fc..c90d9cdd0f0 100644 --- a/t/unit/tasks/test_tasks.py +++ b/t/unit/tasks/test_tasks.py @@ -568,6 +568,7 @@ def test_retry_with_unpickleable_exception(self): assert self.retry_task_unpickleable_exc.iterations == 3 exc_wrapper = exc_info.value + assert exc_wrapper.exc_cls_name == "UnpickleableException" assert exc_wrapper.exc_args == ("foo", ) def test_max_retries_exceeded(self): From 738caba02f7ee564b0a4a6947d5d8efff288b0b3 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Fri, 24 Mar 2023 21:01:45 +0300 Subject: [PATCH 0339/1051] Hotfix for retrying a task with stamps (Original fix that introduced new bug: #8120) (#8158) --- celery/app/task.py | 3 ++- t/integration/test_canvas.py | 3 +-- t/unit/tasks/test_stamping.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/celery/app/task.py b/celery/app/task.py index 71ea2591e64..21698f5ed6b 100644 --- a/celery/app/task.py +++ b/celery/app/task.py @@ -147,8 +147,9 @@ def as_execution_options(self): } if hasattr(self, 'stamps') and hasattr(self, 'stamped_headers'): if self.stamps is not None and self.stamped_headers is not None: - execution_options['stamps'] = self.stamps execution_options['stamped_headers'] = self.stamped_headers + for k, v in self.stamps.items(): + execution_options[k] = v return execution_options @property diff --git a/t/integration/test_canvas.py b/t/integration/test_canvas.py index a582dcef5a8..78105d7ef9e 100644 --- a/t/integration/test_canvas.py +++ b/t/integration/test_canvas.py @@ -3428,6 +3428,5 @@ def on_signature(self, sig, **headers) -> dict: stamped_task.stamp(visitor=CustomStampingVisitor()) assertion_result = True res = stamped_task.delay() - with pytest.raises(TimeoutError): - res.get(timeout=2) + res.get(timeout=TIMEOUT) assert assertion_result diff --git a/t/unit/tasks/test_stamping.py b/t/unit/tasks/test_stamping.py index da8492b534b..da167bd0bc3 100644 --- a/t/unit/tasks/test_stamping.py +++ b/t/unit/tasks/test_stamping.py @@ -1242,4 +1242,4 @@ def test_retry_stamping(self): self.retry_task.request.stamps = {'stamp': 'value'} sig = self.retry_task.signature_from_request() assert sig.options['stamped_headers'] == ['stamp'] - assert sig.options['stamps'] == {'stamp': 'value'} + assert sig.options['stamp'] == 'value' From 716926d44dba7a3479962af44b94a9b2f6c63e38 Mon Sep 17 00:00:00 2001 From: Alessio Bogon <778703+youtux@users.noreply.github.com> Date: Sun, 26 Mar 2023 16:41:33 +0200 Subject: [PATCH 0340/1051] Fix integration test (#8156) * Fix test * Update t/integration/test_tasks.py * Revert "Update t/integration/test_tasks.py" This reverts commit ffa4f24b06e9b785ed660a51346c2808cfcedc7f. * Try a bigger timeout * Revert "Try a bigger timeout" This reverts commit 20275143bceff8a7f9ed74b90050a5bdf83fa97c. 
* Mark again test as xfail for rpc backend --------- Co-authored-by: Asif Saif Uddin --- t/integration/test_tasks.py | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/t/integration/test_tasks.py b/t/integration/test_tasks.py index 52b56be92f3..3a2432114e2 100644 --- a/t/integration/test_tasks.py +++ b/t/integration/test_tasks.py @@ -11,7 +11,7 @@ from celery.utils.serialization import UnpickleableExceptionWrapper from celery.worker import state as worker_state -from .conftest import get_active_redis_channels +from .conftest import TEST_BACKEND, get_active_redis_channels from .tasks import (ClassBasedAutoRetryTask, ExpectedException, add, add_ignore_result, add_not_typed, fail, fail_unpickleable, print_unicode, retry, retry_once, retry_once_headers, retry_once_priority, retry_unpickleable, return_properties, sleeping) @@ -329,6 +329,11 @@ def test_wrong_arguments(self, manager): result.get(timeout=5) assert result.status == 'FAILURE' + @pytest.mark.xfail( + condition=TEST_BACKEND == "rpc", + reason="Retry failed on rpc backend", + strict=False, + ) def test_retry(self, manager): """Tests retrying of task.""" # Tests when max. retries is reached @@ -349,11 +354,15 @@ def test_retry(self, manager): # Tests when task is retried but after returns correct result result = retry.delay(return_value='bar') - for _ in range(5): + + tik = time.monotonic() + while time.monotonic() < tik + 5: status = result.status if status != 'PENDING': break - sleep(1) + sleep(0.1) + else: + raise AssertionError("Timeout while waiting for the task to be retried") assert status == 'RETRY' assert result.get() == 'bar' assert result.status == 'SUCCESS' From 24f3aae07fcdcea348c1bef60fe7660e0a215c9f Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 28 Mar 2023 22:13:21 +0300 Subject: [PATCH 0341/1051] Fixed bug in revoke_by_stamped_headers where impl did not match doc (#8162) * Fixed bug in revoke_by_stamped_headers where impl did not match doc (doc was right) * Fixed a bug where headers of type list were changed to a tuple instead of a dict (when used via CLI) --- celery/worker/control.py | 29 +++++++++++++-------- t/unit/worker/test_control.py | 47 +++++++++++++++++++++++++++++++++++ 2 files changed, 66 insertions(+), 10 deletions(-) diff --git a/celery/worker/control.py b/celery/worker/control.py index 89a4feb2c63..7fb0622ef2b 100644 --- a/celery/worker/control.py +++ b/celery/worker/control.py @@ -167,7 +167,7 @@ def revoke_by_stamped_headers(state, headers, terminate=False, signal=None, **kw # Outside of this scope that is a function. 
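For reference, a hedged sketch of driving this command from the client side; it assumes ``app.control`` exposes a broadcast method of the same name, and the broker URL and stamp values are placeholders (the list-to-dict conversion just below handles the CLI's ``key=value`` form):

.. code-block:: python

    from celery import Celery

    app = Celery("proj", broker="redis://localhost:6379/0")

    # Dict form, as used programmatically; the CLI would instead pass
    # a list such as ["header_A=value_1"].
    app.control.revoke_by_stamped_headers(
        {"header_A": "value_1"},
        terminate=True,
    )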
# supports list argument since 3.1 if isinstance(headers, list): - headers = {h.split('=')[0]: h.split('=')[1] for h in headers}, None + headers = {h.split('=')[0]: h.split('=')[1] for h in headers} worker_state.revoked_headers.update(headers) @@ -175,22 +175,31 @@ def revoke_by_stamped_headers(state, headers, terminate=False, signal=None, **kw return ok(f'headers {headers} flagged as revoked') task_ids = set() - requests = list(worker_state.active_requests) + active_requests = list(worker_state.active_requests) # Terminate all running tasks of matching headers - if requests: + if active_requests: warnings.warn( "Terminating tasks by headers does not scale well when worker concurrency is high", CeleryWarning ) - for req in requests: - if req.stamped_headers: - for stamped_header_key, expected_header_value in headers.items(): - if stamped_header_key in req.stamped_headers and \ - stamped_header_key in req._message.headers['stamps']: - actual_header = req._message.headers['stamps'][stamped_header_key] - if expected_header_value in actual_header: + # Go through all active requests, and check if one of the + # requests has a stamped header that matches the given headers to revoke + + req: Request + for req in active_requests: + # Check stamps exist + if req.stamped_headers and req.stamps: + # if so, check if any of the stamped headers match the given headers + for expected_header_key, expected_header_value in headers.items(): + if expected_header_key in req.stamps: + actual_header = req.stamps[expected_header_key] + # Check any possible match regardless if the stamps are a sequence or not + if any([ + header in maybe_list(expected_header_value) + for header in maybe_list(actual_header) + ]): task_ids.add(req.task_id) continue diff --git a/t/unit/worker/test_control.py b/t/unit/worker/test_control.py index a1761a1cb01..f5086f17f93 100644 --- a/t/unit/worker/test_control.py +++ b/t/unit/worker/test_control.py @@ -562,6 +562,53 @@ def test_revoke_by_stamped_headers_terminate(self): finally: worker_state.task_ready(request) + @pytest.mark.parametrize( + "header_to_revoke", + [ + {'header_A': 'value_1'}, + {'header_B': ['value_2', 'value_3']}, + {'header_C': ('value_2', 'value_3')}, + {'header_D': {'value_2', 'value_3'}}, + {'header_E': [1, '2', 3.0]}, + ], + ) + def test_revoke_by_stamped_headers(self, header_to_revoke): + ids = [] + + # Create at least more than one request with the same stamped header + for _ in range(2): + headers = { + "id": uuid(), + "task": self.mytask.name, + "stamped_headers": header_to_revoke.keys(), + "stamps": header_to_revoke, + } + ids.append(headers["id"]) + message = self.TaskMessage( + self.mytask.name, + "do re mi", + ) + message.headers.update(headers) + request = Request( + message, + app=self.app, + ) + + # Add the request to the active_requests so the request is found + # when the revoke_by_stamped_headers is called + worker_state.active_requests.add(request) + worker_state.task_reserved(request) + + state = self.create_state() + state.consumer = Mock() + # Revoke by header + revoked_headers.clear() + r = control.revoke_by_stamped_headers(state, header_to_revoke, terminate=True) + # Check all of the requests were revoked by a single header + assert all([id in r['ok'] for id in ids]), "All requests should be revoked" + assert revoked_headers == header_to_revoke + revoked_headers.clear() + def test_autoscale(self): self.panel.state.consumer = Mock() self.panel.state.consumer.controller = Mock() From 115b33ee8febc4911f14ba0bbedc99047eb52cf1 Mon Sep 17 00:00:00 
2001 From: Tomer Nosrati Date: Wed, 29 Mar 2023 00:29:22 +0300 Subject: [PATCH 0342/1051] Align revoke and revoke_by_stamped_headers return values when terminate is True (#8163) --- celery/worker/control.py | 2 ++ t/unit/worker/test_control.py | 25 +++++++++++++++++++++++++ 2 files changed, 27 insertions(+) diff --git a/celery/worker/control.py b/celery/worker/control.py index 7fb0622ef2b..6676fe71033 100644 --- a/celery/worker/control.py +++ b/celery/worker/control.py @@ -148,6 +148,8 @@ def revoke(state, task_id, terminate=False, signal=None, **kwargs): # supports list argument since 3.1 task_ids, task_id = set(maybe_list(task_id) or []), None task_ids = _revoke(state, task_ids, terminate, signal, **kwargs) + if isinstance(task_ids, dict) and 'ok' in task_ids: + return task_ids return ok(f'tasks {task_ids} flagged as revoked') diff --git a/t/unit/worker/test_control.py b/t/unit/worker/test_control.py index f5086f17f93..10c964cab39 100644 --- a/t/unit/worker/test_control.py +++ b/t/unit/worker/test_control.py @@ -609,6 +609,31 @@ def test_revoke_by_stamped_headers(self, header_to_revoke): assert revoked_headers == header_to_revoke revoked_headers.clear() + def test_revoke_return_value_terminate_true(self): + header_to_revoke = {'foo': 'bar'} + headers = { + "id": uuid(), + "task": self.mytask.name, + "stamped_headers": header_to_revoke.keys(), + "stamps": header_to_revoke, + } + message = self.TaskMessage( + self.mytask.name, + "do re mi", + ) + message.headers.update(headers) + request = Request( + message, + app=self.app, + ) + worker_state.active_requests.add(request) + worker_state.task_reserved(request) + state = self.create_state() + state.consumer = Mock() + r = control.revoke(state, headers["id"], terminate=True) + r_headers = control.revoke_by_stamped_headers(state, header_to_revoke, terminate=True) + assert r["ok"] == r_headers["ok"] + def test_autoscale(self): self.panel.state.consumer = Mock() self.panel.state.consumer.controller = Mock() From ed71ebb2addd0579e2f64e15e9d44ec7a1e31434 Mon Sep 17 00:00:00 2001 From: Trenton H <797416+stumpylog@users.noreply.github.com> Date: Wed, 29 Mar 2023 09:45:02 -0700 Subject: [PATCH 0343/1051] Update & simplify GHA pip caching (#8164) * Updates the workflow to use actions/setup-python caching instead of manual caching * Also add cache handling for the integration jobs --- .github/workflows/python-package.yml | 22 ++++------------------ 1 file changed, 4 insertions(+), 18 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index ad0c68722f9..ab135fefc7f 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -46,19 +46,8 @@ jobs: uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - - - name: Get pip cache dir - id: pip-cache - run: | - echo "::set-output name=dir::$(pip cache dir)" - - name: Cache - uses: actions/cache@v3.0.2 - with: - path: ${{ steps.pip-cache.outputs.dir }} - key: - ${{ matrix.python-version }}-${{matrix.os}}-${{ hashFiles('**/setup.py') }} - restore-keys: | - ${{ matrix.python-version }}-${{matrix.os}} + cache: 'pip' + cache-dependency-path: '**/setup.py' - name: Install tox run: python -m pip install --upgrade pip tox tox-gh-actions @@ -114,11 +103,8 @@ jobs: uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - - - name: Get pip cache dir - id: pip-cache - run: | - echo "::set-output name=dir::$(pip cache dir)" + cache: 'pip' + cache-dependency-path: '**/setup.py' - name: 
Install tox run: python -m pip install --upgrade pip tox tox-gh-actions - name: > From 2e168af493fb741e9ddc7261780a0070927351ed Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Thu, 30 Mar 2023 13:09:07 +0600 Subject: [PATCH 0344/1051] Update auth.txt (#8167) --- requirements/extras/auth.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/auth.txt b/requirements/extras/auth.txt index f50d2fca306..bb6e5788554 100644 --- a/requirements/extras/auth.txt +++ b/requirements/extras/auth.txt @@ -1 +1 @@ -cryptography==39.0.1 +cryptography==40.0.1 From 342ebea68be994c46907a8ec9fd30dc8bed4c362 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Mon, 3 Apr 2023 15:11:39 +0600 Subject: [PATCH 0345/1051] Update test.txt versions (#8173) --- requirements/test.txt | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/test.txt b/requirements/test.txt index a766f6c7c17..6aa00249200 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,13 +1,13 @@ pytest==7.2.2 pytest-celery==0.0.0 pytest-subtests==0.10.0 -pytest-timeout~=2.1.0 +pytest-timeout==2.1.0 pytest-click==1.1.0 -pytest-order==1.0.1 -boto3>=1.9.178 -moto>=2.2.6 +pytest-order==1.1.0 +boto3===1.26.104 +moto==4.1.6 # typing extensions -mypy==1.0.1; platform_python_implementation=="CPython" +mypy==1.1.1; platform_python_implementation=="CPython" pre-commit==2.21.0 -r extras/yaml.txt -r extras/msgpack.txt From 74f66ff8e4b9a12bcb7b5c4dfe4be78ab450d5e3 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Tue, 4 Apr 2023 13:15:15 +0600 Subject: [PATCH 0346/1051] remove extra = from test.txt (#8179) --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index 6aa00249200..aa5c85f3633 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -4,7 +4,7 @@ pytest-subtests==0.10.0 pytest-timeout==2.1.0 pytest-click==1.1.0 pytest-order==1.1.0 -boto3===1.26.104 +boto3==1.26.104 moto==4.1.6 # typing extensions mypy==1.1.1; platform_python_implementation=="CPython" From 025c1d4f9a1208a9d3bb01b1129a0f48234d3df0 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Tue, 4 Apr 2023 14:07:26 +0600 Subject: [PATCH 0347/1051] Update sqs.txt kombu[sqs]>=5.3.0b3 (#8174) * Update sqs.txt kombu[sqs]>=5.3.0b3 * Update requirements/extras/sqs.txt --- requirements/extras/sqs.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/sqs.txt b/requirements/extras/sqs.txt index b4d8f05da78..3b76a17bbd0 100644 --- a/requirements/extras/sqs.txt +++ b/requirements/extras/sqs.txt @@ -1 +1 @@ -kombu[sqs]~=5.2.4 +kombu[sqs]~=5.3.0b3 From 37d7a13109632f972960d6f3b7dea14763193951 Mon Sep 17 00:00:00 2001 From: jaroslawporada <78016744+jaroslawporada@users.noreply.github.com> Date: Sat, 8 Apr 2023 09:27:22 +0200 Subject: [PATCH 0348/1051] Added signal triggered before fork (#8177) * Added signal triggered before fork * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * Update docs/userguide/signals.rst * Added unit test and enhanced doc for worker_before_create_process signal * Updated docs/userguide/signals.rst --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Asif Saif Uddin --- celery/concurrency/asynpool.py | 2 ++ celery/signals.py | 14 ++++++++------ docs/userguide/signals.rst | 14 ++++++++++++++ t/unit/concurrency/test_prefork.py | 12 ++++++++++++ 4 files 
changed, 36 insertions(+), 6 deletions(-) diff --git a/celery/concurrency/asynpool.py b/celery/concurrency/asynpool.py index 19715005828..c024e685f8a 100644 --- a/celery/concurrency/asynpool.py +++ b/celery/concurrency/asynpool.py @@ -36,6 +36,7 @@ from kombu.utils.functional import fxrange from vine import promise +from celery.signals import worker_before_create_process from celery.utils.functional import noop from celery.utils.log import get_logger from celery.worker import state as worker_state @@ -476,6 +477,7 @@ def __init__(self, processes=None, synack=False, ) def _create_worker_process(self, i): + worker_before_create_process.send(sender=self) gc.collect() # Issue #2927 return super()._create_worker_process(i) diff --git a/celery/signals.py b/celery/signals.py index 9be4f55a52f..290fa2ba858 100644 --- a/celery/signals.py +++ b/celery/signals.py @@ -18,12 +18,13 @@ 'task_prerun', 'task_postrun', 'task_success', 'task_received', 'task_rejected', 'task_unknown', 'task_retry', 'task_failure', 'task_revoked', 'celeryd_init', - 'celeryd_after_setup', 'worker_init', 'worker_process_init', - 'worker_process_shutdown', 'worker_ready', 'worker_shutdown', - 'worker_shutting_down', 'setup_logging', 'after_setup_logger', - 'after_setup_task_logger', 'beat_init', 'beat_embedded_init', - 'heartbeat_sent', 'eventlet_pool_started', 'eventlet_pool_preshutdown', - 'eventlet_pool_postshutdown', 'eventlet_pool_apply', + 'celeryd_after_setup', 'worker_init', 'worker_before_create_process', + 'worker_process_init', 'worker_process_shutdown', 'worker_ready', + 'worker_shutdown', 'worker_shutting_down', 'setup_logging', + 'after_setup_logger', 'after_setup_task_logger', 'beat_init', + 'beat_embedded_init', 'heartbeat_sent', 'eventlet_pool_started', + 'eventlet_pool_preshutdown', 'eventlet_pool_postshutdown', + 'eventlet_pool_apply', ) # - Task @@ -105,6 +106,7 @@ # - Worker import_modules = Signal(name='import_modules') worker_init = Signal(name='worker_init') +worker_before_create_process = Signal(name="worker_before_create_process") worker_process_init = Signal(name='worker_process_init') worker_process_shutdown = Signal(name='worker_process_shutdown') worker_ready = Signal(name='worker_ready') diff --git a/docs/userguide/signals.rst b/docs/userguide/signals.rst index f2dfc2320e1..44684727a9f 100644 --- a/docs/userguide/signals.rst +++ b/docs/userguide/signals.rst @@ -543,6 +543,20 @@ Provides arguments: Dispatched before the worker is started. +.. signal:: worker_before_create_process + +``worker_before_create_process`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Dispatched in the parent process, just before new child process is created in the prefork pool. +It can be used to clean up instances that don't behave well when forking. + +.. code-block:: python + + @signals.worker_before_create_process.connect + def clean_channels(**kwargs): + grpc_singleton.clean_channel() + .. 
signal:: worker_ready ``worker_ready`` diff --git a/t/unit/concurrency/test_prefork.py b/t/unit/concurrency/test_prefork.py index 49b80c17f0c..7690ef09a40 100644 --- a/t/unit/concurrency/test_prefork.py +++ b/t/unit/concurrency/test_prefork.py @@ -365,6 +365,18 @@ def test_register_with_event_loop__no_on_tick_dupes(self): pool.register_with_event_loop(hub) hub.on_tick.add.assert_called_once() + @patch('billiard.pool.Pool._create_worker_process') + def test_before_create_process_signal(self, create_process): + from celery import signals + on_worker_before_create_process = Mock() + signals.worker_before_create_process.connect(on_worker_before_create_process) + pool = asynpool.AsynPool(processes=1, threads=False) + create_process.assert_called_once_with(0) + on_worker_before_create_process.assert_any_call( + signal=signals.worker_before_create_process, + sender=pool, + ) + @t.skip.if_win32 class test_ResultHandler: From a12f24b7261499de6ff8e0aac3ffa2068a9886fd Mon Sep 17 00:00:00 2001 From: Maxwell Muoto Date: Sat, 8 Apr 2023 16:14:03 -0500 Subject: [PATCH 0349/1051] Update documentation (#8188) --- docs/getting-started/backends-and-brokers/index.rst | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/getting-started/backends-and-brokers/index.rst b/docs/getting-started/backends-and-brokers/index.rst index 6b0c35e2d8b..5cb8c899363 100644 --- a/docs/getting-started/backends-and-brokers/index.rst +++ b/docs/getting-started/backends-and-brokers/index.rst @@ -98,4 +98,6 @@ SQLAlchemy SQLAlchemy is a backend. -It allows Celery to interface with MySQL, PostgreSQL, SQlite, and more. It is a ORM, and is the way Celery can use a SQL DB as a result backend. Historically, SQLAlchemy has not been the most stable result backend so if chosen one should proceed with caution. +It allows Celery to interface with MySQL, PostgreSQL, SQLite, and more. It is an ORM, and is the way Celery can use a SQL DB as a result backend.
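A minimal configuration sketch for this backend, assuming the ``sqlalchemy`` extra is installed and with a placeholder connection string:

.. code-block:: python

    from celery import Celery

    # The 'db+' prefix routes the result backend through SQLAlchemy.
    app = Celery(
        "proj",
        broker="amqp://",
        backend="db+postgresql://user:password@localhost/celery_results",
    )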
+ +:ref:`See documentation for details ` \ No newline at end of file From c8b25394f0237972aea06e5e2e5e9be8a2bea868 Mon Sep 17 00:00:00 2001 From: Maxwell Muoto Date: Sun, 9 Apr 2023 03:48:41 -0500 Subject: [PATCH 0350/1051] Deprecate pytz and use zoneinfo (#8159) * Initial test coverage update * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * Fully remove pytz * remove from dependencies * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * bug fix * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * test fixes * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * fix app test * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * noqa * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * fix * small change * Add tzdata for windows * Test case * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * Fix formatting * Improved documentation * Fix * remove * Fix * Fix * more accurate * Comment * docstrings * future import * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * pre-commit * Fix * fix * docstring fix * comment * pre-commit * trailing whitespace fix * Update documentation * Update celery/utils/time.py * Update celery/utils/time.py * Update celery/utils/time.py * Update celery/utils/time.py * Update celery/utils/time.py --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Asif Saif Uddin --- celery/utils/iso8601.py | 9 +- celery/utils/text.py | 2 +- celery/utils/time.py | 191 ++++++++++++++++++----------- docs/faq.rst | 4 - docs/userguide/configuration.rst | 2 +- requirements/default.txt | 3 +- requirements/dev.txt | 1 - setup.cfg | 3 +- t/unit/app/test_app.py | 17 ++- t/unit/app/test_beat.py | 12 +- t/unit/app/test_schedules.py | 59 +++++---- t/unit/backends/test_mongodb.py | 11 +- t/unit/utils/test_serialization.py | 8 +- t/unit/utils/test_time.py | 134 ++++++++++---------- 14 files changed, 259 insertions(+), 197 deletions(-) diff --git a/celery/utils/iso8601.py b/celery/utils/iso8601.py index 2a5ae69619f..ffe342b40c8 100644 --- a/celery/utils/iso8601.py +++ b/celery/utils/iso8601.py @@ -7,7 +7,6 @@ - raise :exc:`ValueError` instead of ``ParseError`` - return naive :class:`~datetime.datetime` by default - - uses :class:`pytz.FixedOffset` This is the original License: @@ -33,9 +32,7 @@ SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
""" import re -from datetime import datetime - -from pytz import FixedOffset +from datetime import datetime, timedelta, timezone from celery.utils.deprecated import warn @@ -62,7 +59,7 @@ def parse_iso8601(datestring): groups = m.groupdict() tz = groups['timezone'] if tz == 'Z': - tz = FixedOffset(0) + tz = timezone(timedelta(0)) elif tz: m = TIMEZONE_REGEX.match(tz) prefix, hours, minutes = m.groups() @@ -70,7 +67,7 @@ def parse_iso8601(datestring): if prefix == '-': hours = -hours minutes = -minutes - tz = FixedOffset(minutes + hours * 60) + tz = timezone(timedelta(minutes=minutes, hours=hours)) return datetime( int(groups['year']), int(groups['month']), int(groups['day']), int(groups['hour'] or 0), diff --git a/celery/utils/text.py b/celery/utils/text.py index 3dc7ade973f..9d18a735bb6 100644 --- a/celery/utils/text.py +++ b/celery/utils/text.py @@ -93,7 +93,7 @@ def truncate(s: str, maxlen: int = 128, suffix: str = '...') -> str: return s -def pluralize(n: int, text: str, suffix: str = 's') -> str: +def pluralize(n: float, text: str, suffix: str = 's') -> str: """Pluralize term when n is greater than one.""" if n != 1: return text + suffix diff --git a/celery/utils/time.py b/celery/utils/time.py index 984da17c80f..f5329a5e39b 100644 --- a/celery/utils/time.py +++ b/celery/utils/time.py @@ -1,20 +1,31 @@ """Utilities related to dates, times, intervals, and timezones.""" +from __future__ import annotations + import numbers import os import random +import sys import time as _time from calendar import monthrange -from datetime import date, datetime, timedelta, tzinfo +from datetime import date, datetime, timedelta +from datetime import timezone as datetime_timezone +from datetime import tzinfo +from types import ModuleType +from typing import Any, Callable +from dateutil import tz as dateutil_tz from kombu.utils.functional import reprcall from kombu.utils.objects import cached_property -from pytz import AmbiguousTimeError, FixedOffset -from pytz import timezone as _timezone -from pytz import utc from .functional import dictfilter from .text import pluralize +if sys.version_info >= (3, 9): + from zoneinfo import ZoneInfo +else: + from backports.zoneinfo import ZoneInfo + + __all__ = ( 'LocalTimezone', 'timezone', 'maybe_timedelta', 'delta_resolution', 'remaining', 'rate', 'weekday', @@ -48,15 +59,16 @@ class LocalTimezone(tzinfo): - """Local time implementation. + """Local time implementation. Provided in _Zone to the app when `enable_utc` is disabled. + Otherwise, _Zone provides a UTC ZoneInfo instance as the timezone implementation for the application. Note: Used only when the :setting:`enable_utc` setting is disabled. """ - _offset_cache = {} + _offset_cache: dict[int, tzinfo] = {} - def __init__(self): + def __init__(self) -> None: # This code is moved in __init__ to execute it as late as possible # See get_default_timezone(). 
self.STDOFFSET = timedelta(seconds=-_time.timezone) @@ -67,32 +79,30 @@ def __init__(self): self.DSTDIFF = self.DSTOFFSET - self.STDOFFSET super().__init__() - def __repr__(self): + def __repr__(self) -> str: return f'' - def utcoffset(self, dt): + def utcoffset(self, dt: datetime) -> timedelta: return self.DSTOFFSET if self._isdst(dt) else self.STDOFFSET - def dst(self, dt): + def dst(self, dt: datetime) -> timedelta: return self.DSTDIFF if self._isdst(dt) else ZERO - def tzname(self, dt): + def tzname(self, dt: datetime) -> str: return _time.tzname[self._isdst(dt)] - def fromutc(self, dt): + def fromutc(self, dt: datetime) -> datetime: # The base tzinfo class no longer implements a DST # offset aware .fromutc() in Python 3 (Issue #2306). - - # I'd rather rely on pytz to do this, than port - # the C code from cpython's fromutc [asksol] offset = int(self.utcoffset(dt).seconds / 60.0) try: tz = self._offset_cache[offset] except KeyError: - tz = self._offset_cache[offset] = FixedOffset(offset) + tz = self._offset_cache[offset] = datetime_timezone( + timedelta(minutes=offset)) return tz.fromutc(dt.replace(tzinfo=tz)) - def _isdst(self, dt): + def _isdst(self, dt: datetime) -> bool: tt = (dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second, dt.weekday(), 0, 0) @@ -102,53 +112,69 @@ def _isdst(self, dt): class _Zone: + """Timezone class that provides the timezone for the application. + If `enable_utc` is disabled, LocalTimezone is provided as the timezone provider through local(). + Otherwise, this class provides a UTC ZoneInfo instance as the timezone provider for the application. + + Additionally this class provides a few utility methods for converting datetimes. + """ + + def tz_or_local(self, tzinfo: tzinfo | None = None) -> tzinfo: + """Return either our local timezone or the provided timezone.""" - def tz_or_local(self, tzinfo=None): # pylint: disable=redefined-outer-name if tzinfo is None: return self.local return self.get_timezone(tzinfo) - def to_local(self, dt, local=None, orig=None): + def to_local(self, dt: datetime, local=None, orig=None): + """Converts a datetime to the local timezone.""" + if is_naive(dt): dt = make_aware(dt, orig or self.utc) return localize(dt, self.tz_or_local(local)) - def to_system(self, dt): + def to_system(self, dt: datetime) -> datetime: + """Converts a datetime to the system timezone.""" + # tz=None is a special case since Python 3.3, and will # convert to the current local timezone (Issue #2306). 
return dt.astimezone(tz=None) - def to_local_fallback(self, dt): + def to_local_fallback(self, dt: datetime) -> datetime: + """Converts a datetime to the local timezone, or the system timezone.""" if is_naive(dt): return make_aware(dt, self.local) return localize(dt, self.local) - def get_timezone(self, zone): + def get_timezone(self, zone: str | tzinfo) -> tzinfo: + """Returns ZoneInfo timezone if the provided zone is a string, otherwise return the zone.""" if isinstance(zone, str): - return _timezone(zone) + return ZoneInfo(zone) return zone @cached_property - def local(self): + def local(self) -> LocalTimezone: + """Return LocalTimezone instance for the application.""" return LocalTimezone() @cached_property - def utc(self): + def utc(self) -> tzinfo: + """Return UTC timezone created with ZoneInfo.""" return self.get_timezone('UTC') timezone = _Zone() -def maybe_timedelta(delta): +def maybe_timedelta(delta: int) -> timedelta: """Convert integer to timedelta, if argument is an integer.""" if isinstance(delta, numbers.Real): return timedelta(seconds=delta) return delta -def delta_resolution(dt, delta): +def delta_resolution(dt: datetime, delta: timedelta) -> datetime: """Round a :class:`~datetime.datetime` to the resolution of timedelta. If the :class:`~datetime.timedelta` is in days, the @@ -171,7 +197,9 @@ def delta_resolution(dt, delta): return dt -def remaining(start, ends_in, now=None, relative=False): +def remaining( + start: datetime, ends_in: timedelta, now: Callable | None = None, + relative: bool = False) -> timedelta: """Calculate the remaining time for a start date and a timedelta. For example, "how many seconds left for 30 seconds after start?" @@ -189,7 +217,9 @@ def remaining(start, ends_in, now=None, relative=False): ~datetime.timedelta: Remaining time. """ now = now or datetime.utcnow() - if str(start.tzinfo) == str(now.tzinfo) and now.utcoffset() != start.utcoffset(): + if str( + start.tzinfo) == str( + now.tzinfo) and now.utcoffset() != start.utcoffset(): # DST started/ended start = start.replace(tzinfo=now.tzinfo) end_date = start + ends_in @@ -202,7 +232,7 @@ def remaining(start, ends_in, now=None, relative=False): return ret -def rate(r): +def rate(r: str) -> float: """Convert rate string (`"100/m"`, `"2/h"` or `"0.5/s"`) to seconds.""" if r: if isinstance(r, str): @@ -212,7 +242,7 @@ def rate(r): return 0 -def weekday(name): +def weekday(name: str) -> int: """Return the position of a weekday: 0 - 7, where 0 is Sunday. Example: @@ -227,7 +257,9 @@ def weekday(name): raise KeyError(name) -def humanize_seconds(secs, prefix='', sep='', now='now', microseconds=False): +def humanize_seconds( + secs: int, prefix: str = '', sep: str = '', now: str = 'now', + microseconds: bool = False) -> str: """Show seconds in human form. For example, 60 becomes "1 minute", and 7200 becomes "2 hours". 
@@ -250,7 +282,7 @@ return now -def maybe_iso8601(dt): +def maybe_iso8601(dt: datetime | str | None) -> None | datetime: """Either ``datetime | str -> datetime`` or ``None -> None``.""" if not dt: return @@ -259,52 +291,58 @@ def maybe_iso8601(dt): return datetime.fromisoformat(dt) -def is_naive(dt): - """Return :const:`True` if :class:`~datetime.datetime` is naive.""" +def is_naive(dt: datetime) -> bool: + """Return True if :class:`~datetime.datetime` is naive, meaning it doesn't have timezone info set.""" return dt.tzinfo is None or dt.tzinfo.utcoffset(dt) is None -def make_aware(dt, tz): +def _can_detect_ambiguous(tz: tzinfo) -> bool: + """Helper function to determine if a timezone can detect ambiguous times using dateutil.""" + + return isinstance(tz, ZoneInfo) or hasattr(tz, "is_ambiguous") + + +def _is_ambigious(dt: datetime, tz: tzinfo) -> bool: + """Helper function to determine if a datetime is ambiguous in a given timezone, using Python's dateutil module. + + Returns False if the timezone cannot detect ambiguity, or if there is no ambiguity, otherwise True. + + In order to detect ambiguous datetimes, the timezone must be built using ZoneInfo, or have an is_ambiguous + method. Previously, pytz timezones would throw an AmbiguousTimeError if the localized dt was ambiguous, + but now we need to specifically check for ambiguity with dateutil, as pytz is deprecated. + """ + + return _can_detect_ambiguous(tz) and dateutil_tz.datetime_ambiguous(dt) + + +def make_aware(dt: datetime, tz: tzinfo) -> datetime: """Set timezone for a :class:`~datetime.datetime` object.""" - try: - _localize = tz.localize - except AttributeError: - return dt.replace(tzinfo=tz) - else: - # works on pytz timezones - try: - return _localize(dt, is_dst=None) - except AmbiguousTimeError: - return min(_localize(dt, is_dst=True), - _localize(dt, is_dst=False)) + dt = dt.replace(tzinfo=tz) + if _is_ambigious(dt, tz): + dt = min(dt.replace(fold=0), dt.replace(fold=1)) + return dt + + +def localize(dt: datetime, tz: tzinfo) -> datetime: + """Convert aware :class:`~datetime.datetime` to another timezone. -def localize(dt, tz): - """Convert aware :class:`~datetime.datetime` to another timezone.""" + Using a ZoneInfo timezone will give the most flexibility in terms of ambiguous DST handling. + """ if is_naive(dt): # Ensure timezone aware datetime dt = make_aware(dt, tz) - if dt.tzinfo == utc: + if dt.tzinfo == ZoneInfo("UTC"): dt = dt.astimezone(tz) # Always safe to call astimezone on utc zones - try: - _normalize = tz.normalize - except AttributeError: # non-pytz tz - return dt - else: - try: - return _normalize(dt, is_dst=None) - except TypeError: - return _normalize(dt) - except AmbiguousTimeError: - return min(_normalize(dt, is_dst=True), - _normalize(dt, is_dst=False)) + return dt -def to_utc(dt): +def to_utc(dt: datetime) -> datetime: """Convert naive :class:`~datetime.datetime` to UTC.""" return make_aware(dt, timezone.utc) -def maybe_make_aware(dt, tz=None, naive_as_utc=True): +def maybe_make_aware(dt: datetime, tz: tzinfo | None = None, + naive_as_utc: bool = True) -> datetime: """Convert dt to aware datetime, do nothing if dt is already aware.""" if is_naive(dt): if naive_as_utc: @@ -320,7 +358,7 @@ class ffwd: def __init__(self, year=None, month=None, weeks=0, weekday=None, day=None, hour=None, minute=None, second=None, microsecond=None, - **kwargs): + **kwargs: Any): # pylint: disable=redefined-outer-name # weekday is also a function in outer scope.
self.year = year @@ -335,11 +373,11 @@ def __init__(self, year=None, month=None, weeks=0, weekday=None, day=None, self.days = weeks * 7 self._has_time = self.hour is not None or self.minute is not None - def __repr__(self): + def __repr__(self) -> str: return reprcall('ffwd', (), self._fields(weeks=self.weeks, weekday=self.weekday)) - def __radd__(self, other): + def __radd__(self, other: Any) -> timedelta: if not isinstance(other, date): return NotImplemented year = self.year or other.year @@ -351,7 +389,7 @@ def __radd__(self, other): ret += timedelta(days=(7 - ret.weekday() + self.weekday) % 7) return ret + timedelta(days=self.days) - def _fields(self, **extra): + def _fields(self, **extra: Any) -> dict[str, Any]: return dictfilter({ 'year': self.year, 'month': self.month, 'day': self.day, 'hour': self.hour, 'minute': self.minute, @@ -359,24 +397,27 @@ def _fields(self, **extra): }, **extra) -def utcoffset(time=_time, localtime=_time.localtime): +def utcoffset( + time: ModuleType = _time, + localtime: Callable[..., _time.struct_time] = _time.localtime) -> float: """Return the current offset to UTC in hours.""" if localtime().tm_isdst: return time.altzone // 3600 return time.timezone // 3600 -def adjust_timestamp(ts, offset, here=utcoffset): +def adjust_timestamp(ts: float, offset: int, + here: Callable[..., float] = utcoffset) -> float: """Adjust timestamp based on provided utcoffset.""" return ts - (offset - here()) * 3600 def get_exponential_backoff_interval( - factor, - retries, - maximum, - full_jitter=False -): + factor: int, + retries: int, + maximum: int, + full_jitter: bool = False +) -> int: """Calculate the exponential backoff wait time.""" # Will be zero if factor equals 0 countdown = min(maximum, factor * (2 ** retries)) diff --git a/docs/faq.rst b/docs/faq.rst index 01c92d425ce..cd5f3aa874d 100644 --- a/docs/faq.rst +++ b/docs/faq.rst @@ -99,10 +99,6 @@ that these improvements will be merged back into Python one day. It's also used for compatibility with older Python versions that don't come with the multiprocessing module. -- :pypi:`pytz` - -The pytz module provides timezone definitions and related tools. - kombu ~~~~~ diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst index 1831157d612..79f621cce4f 100644 --- a/docs/userguide/configuration.rst +++ b/docs/userguide/configuration.rst @@ -268,7 +268,7 @@ upgraded. Default: ``"UTC"``. Configure Celery to use a custom time zone. -The timezone value can be any time zone supported by the :pypi:`pytz` +The timezone value can be any time zone supported by the `ZoneInfo `_ library. If not set the UTC timezone is used. 
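For example, a minimal sketch of setting a custom zone (any IANA name that ZoneInfo can resolve is accepted):

.. code-block:: python

    from celery import Celery

    app = Celery("proj")
    app.conf.timezone = "Europe/London"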
For backwards compatibility diff --git a/requirements/default.txt b/requirements/default.txt index f03f9b8e03a..4678436d793 100644 --- a/requirements/default.txt +++ b/requirements/default.txt @@ -1,4 +1,3 @@ -pytz>=2021.3 billiard>=4.1.0,<5.0 kombu>=5.3.0b3,<6.0 vine>=5.0.0,<6.0 @@ -7,3 +6,5 @@ click-didyoumean>=0.3.0 click-repl>=0.2.0 click-plugins>=1.1.1 importlib-metadata>=3.6; python_version < '3.8' +backports.zoneinfo>=0.2.1; python_version < '3.9' +tzdata>=2022.7 \ No newline at end of file diff --git a/requirements/dev.txt b/requirements/dev.txt index 7936822a2de..5ac972cef53 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -1,4 +1,3 @@ -pytz git+https://github.com/celery/py-amqp.git git+https://github.com/celery/kombu.git git+https://github.com/celery/billiard.git diff --git a/setup.cfg b/setup.cfg index cd04c7a134b..92cde32eb3a 100644 --- a/setup.cfg +++ b/setup.cfg @@ -32,7 +32,8 @@ per-file-ignores = D, [bdist_rpm] -requires = pytz >= 2016.7 +requires = backports.zoneinfo>=0.2.1;python_version<'3.9' + tzdata>=2022.7 billiard >=4.0.2,<5.0 kombu >= 5.2.1,<6.0.0 diff --git a/t/unit/app/test_app.py b/t/unit/app/test_app.py index c63ff17c16d..7aae8f52d74 100644 --- a/t/unit/app/test_app.py +++ b/t/unit/app/test_app.py @@ -2,6 +2,7 @@ import itertools import os import ssl +import sys import uuid from copy import deepcopy from datetime import datetime, timedelta @@ -27,6 +28,11 @@ from celery.utils.time import localize, timezone, to_utc from t.unit import conftest +if sys.version_info >= (3, 9): + from zoneinfo import ZoneInfo +else: + from backports.zoneinfo import ZoneInfo # noqa + THIS_IS_A_KEY = 'this is a value' @@ -93,7 +99,7 @@ def test_now(self): app_now = self.app.now() - assert app_now.tzinfo.zone == tz_us_eastern.zone + assert app_now.tzinfo == tz_us_eastern diff = to_utc(datetime.utcnow()) - localize(app_now, tz_utc) assert diff <= timedelta(seconds=1) @@ -103,7 +109,7 @@ def test_now(self): del self.app.timezone app_now = self.app.now() assert self.app.timezone == tz_us_eastern - assert app_now.tzinfo.zone == tz_us_eastern.zone + assert app_now.tzinfo == tz_us_eastern @patch('celery.app.base.set_default_app') def test_set_default(self, set_default_app): @@ -520,7 +526,8 @@ def test_can_get_type_hints_for_tasks(self): def foo(parameter: int) -> None: pass - assert typing.get_type_hints(foo) == {'parameter': int, 'return': type(None)} + assert typing.get_type_hints(foo) == { + 'parameter': int, 'return': type(None)} def test_annotate_decorator(self): from celery.app.task import Task @@ -1096,7 +1103,9 @@ def test_thread_backend_thread_safe(self): def test_send_task_expire_as_string(self): try: - self.app.send_task('foo', (1, 2), expires='2023-03-16T17:21:20.663973') + self.app.send_task( + 'foo', (1, 2), + expires='2023-03-16T17:21:20.663973') except TypeError as e: pytest.fail(f'raise unexcepted error {e}') diff --git a/t/unit/app/test_beat.py b/t/unit/app/test_beat.py index b7ff69e4a97..64dad3e8f2d 100644 --- a/t/unit/app/test_beat.py +++ b/t/unit/app/test_beat.py @@ -1,16 +1,21 @@ import errno +import sys from datetime import datetime, timedelta from pickle import dumps, loads from unittest.mock import Mock, call, patch import pytest -import pytz from celery import __version__, beat, uuid from celery.beat import BeatLazyFunc, event_t from celery.schedules import crontab, schedule from celery.utils.objects import Bunch +if sys.version_info >= (3, 9): + from zoneinfo import ZoneInfo +else: + from backports.zoneinfo import ZoneInfo + class MockShelve(dict): 
closed = False @@ -434,9 +439,10 @@ def test_merge_inplace(self): assert a.schedule['bar'].schedule._next_run_at == 40 def test_when(self): - now_time_utc = datetime(2000, 10, 10, 10, 10, 10, 10, tzinfo=pytz.utc) + now_time_utc = datetime(2000, 10, 10, 10, 10, + 10, 10, tzinfo=ZoneInfo("UTC")) now_time_casey = now_time_utc.astimezone( - pytz.timezone('Antarctica/Casey') + ZoneInfo('Antarctica/Casey') ) scheduler = mScheduler(app=self.app) result_utc = scheduler._when( diff --git a/t/unit/app/test_schedules.py b/t/unit/app/test_schedules.py index 793e8b6f3a2..1f4d5fdd85a 100644 --- a/t/unit/app/test_schedules.py +++ b/t/unit/app/test_schedules.py @@ -1,3 +1,4 @@ +import sys import time from contextlib import contextmanager from datetime import datetime, timedelta @@ -6,10 +7,15 @@ from unittest.mock import Mock import pytest -import pytz from celery.schedules import ParseException, crontab, crontab_parser, schedule, solar +if sys.version_info >= (3, 9): + from zoneinfo import ZoneInfo +else: + from backports.zoneinfo import ZoneInfo + + assertions = TestCase('__init__') @@ -78,8 +84,9 @@ def test_event_uses_center(self): try: s.remaining_estimate(datetime.utcnow()) except TypeError: - pytest.fail(f"{s.method} was called with 'use_center' which is not a " - "valid keyword for the function.") + pytest.fail( + f"{s.method} was called with 'use_center' which is not a " + "valid keyword for the function.") class test_schedule: @@ -442,55 +449,55 @@ def test_day_after_dst_end(self): # Test for #1604 issue with region configuration using DST tzname = "Europe/Paris" self.app.timezone = tzname - tz = pytz.timezone(tzname) + tz = ZoneInfo(tzname) crontab = self.crontab(minute=0, hour=9) # Set last_run_at Before DST end - last_run_at = tz.localize(datetime(2017, 10, 28, 9, 0)) + last_run_at = datetime(2017, 10, 28, 9, 0, tzinfo=tz) # Set now after DST end - now = tz.localize(datetime(2017, 10, 29, 7, 0)) + now = datetime(2017, 10, 29, 7, 0, tzinfo=tz) crontab.nowfun = lambda: now next = now + crontab.remaining_estimate(last_run_at) assert next.utcoffset().seconds == 3600 - assert next == tz.localize(datetime(2017, 10, 29, 9, 0)) + assert next == datetime(2017, 10, 29, 9, 0, tzinfo=tz) def test_day_after_dst_start(self): # Test for #1604 issue with region configuration using DST tzname = "Europe/Paris" self.app.timezone = tzname - tz = pytz.timezone(tzname) + tz = ZoneInfo(tzname) crontab = self.crontab(minute=0, hour=9) # Set last_run_at Before DST start - last_run_at = tz.localize(datetime(2017, 3, 25, 9, 0)) + last_run_at = datetime(2017, 3, 25, 9, 0, tzinfo=tz) # Set now after DST start - now = tz.localize(datetime(2017, 3, 26, 7, 0)) + now = datetime(2017, 3, 26, 7, 0, tzinfo=tz) crontab.nowfun = lambda: now next = now + crontab.remaining_estimate(last_run_at) assert next.utcoffset().seconds == 7200 - assert next == tz.localize(datetime(2017, 3, 26, 9, 0)) + assert next == datetime(2017, 3, 26, 9, 0, tzinfo=tz) def test_negative_utc_timezone_with_day_of_month(self): # UTC-8 tzname = "America/Los_Angeles" self.app.timezone = tzname - tz = pytz.timezone(tzname) + tz = ZoneInfo(tzname) # set day_of_month to test on _delta_to_next crontab = self.crontab(minute=0, day_of_month='27-31') # last_run_at: '2023/01/28T23:00:00-08:00' - last_run_at = tz.localize(datetime(2023, 1, 28, 23, 0)) + last_run_at = datetime(2023, 1, 28, 23, 0, tzinfo=tz) # now: '2023/01/29T00:00:00-08:00' - now = tz.localize(datetime(2023, 1, 29, 0, 0)) + now = datetime(2023, 1, 29, 0, 0, tzinfo=tz) crontab.nowfun = lambda: now next 
= now + crontab.remaining_estimate(last_run_at) - assert next == tz.localize(datetime(2023, 1, 29, 0, 0)) + assert next == datetime(2023, 1, 29, 0, 0, tzinfo=tz) class test_crontab_is_due: @@ -831,7 +838,7 @@ def test_execution_not_due_if_task_not_run_at_last_feasible_time_outside_deadlin now = datetime(2022, 12, 5, 10, 30) expected_next_execution_time = datetime(2022, 12, 6, 7, 30) expected_remaining = ( - expected_next_execution_time - now).total_seconds() + expected_next_execution_time - now).total_seconds() # Run the daily (7:30) crontab with the current date with patch_crontab_nowfun(self.daily, now): @@ -847,7 +854,7 @@ def test_execution_not_due_if_task_not_run_at_last_feasible_time_no_deadline_set now = datetime(2022, 12, 5, 10, 30) expected_next_execution_time = datetime(2022, 12, 6, 7, 30) expected_remaining = ( - expected_next_execution_time - now).total_seconds() + expected_next_execution_time - now).total_seconds() # Run the daily (7:30) crontab with the current date with patch_crontab_nowfun(self.daily, now): @@ -865,7 +872,7 @@ def test_execution_due_if_task_not_run_at_last_feasible_time_within_deadline( now = datetime(2022, 12, 5, 8, 0) expected_next_execution_time = datetime(2022, 12, 6, 7, 30) expected_remaining = ( - expected_next_execution_time - now).total_seconds() + expected_next_execution_time - now).total_seconds() # run the daily (7:30) crontab with the current date with patch_crontab_nowfun(self.daily, now): @@ -884,7 +891,7 @@ def test_execution_due_if_task_not_run_at_any_feasible_time_within_deadline( now = datetime(2022, 12, 5, 8, 0) expected_next_execution_time = datetime(2022, 12, 6, 7, 30) expected_remaining = ( - expected_next_execution_time - now).total_seconds() + expected_next_execution_time - now).total_seconds() # Run the daily (7:30) crontab with the current date with patch_crontab_nowfun(self.daily, now): @@ -904,7 +911,7 @@ def test_execution_not_due_if_task_not_run_at_any_feasible_time_outside_deadline now = datetime(2022, 12, 5, 11, 0) expected_next_execution_time = datetime(2022, 12, 6, 7, 30) expected_remaining = ( - expected_next_execution_time - now).total_seconds() + expected_next_execution_time - now).total_seconds() # run the daily (7:30) crontab with the current date with patch_crontab_nowfun(self.daily, now): @@ -918,7 +925,7 @@ def test_execution_not_due_if_last_run_in_future(self): now = datetime(2022, 12, 5, 10, 30) expected_next_execution_time = datetime(2022, 12, 7, 7, 30) expected_remaining = ( - expected_next_execution_time - now).total_seconds() + expected_next_execution_time - now).total_seconds() # Run the daily (7:30) crontab with the current date with patch_crontab_nowfun(self.daily, now): @@ -932,7 +939,7 @@ def test_execution_not_due_if_last_run_at_last_feasible_time(self): now = datetime(2022, 12, 5, 10, 30) expected_next_execution_time = datetime(2022, 12, 6, 7, 30) expected_remaining = ( - expected_next_execution_time - now).total_seconds() + expected_next_execution_time - now).total_seconds() # Run the daily (7:30) crontab with the current date with patch_crontab_nowfun(self.daily, now): @@ -946,7 +953,7 @@ def test_execution_not_due_if_last_run_past_last_feasible_time(self): now = datetime(2022, 12, 5, 10, 30) expected_next_execution_time = datetime(2022, 12, 6, 7, 30) expected_remaining = ( - expected_next_execution_time - now).total_seconds() + expected_next_execution_time - now).total_seconds() # Run the daily (7:30) crontab with the current date with patch_crontab_nowfun(self.daily, now): @@ -958,16 +965,16 @@ def 
test_execution_due_for_negative_utc_timezone_with_day_of_month(self): # UTC-8 tzname = "America/Los_Angeles" self.app.timezone = tzname - tz = pytz.timezone(tzname) + tz = ZoneInfo(tzname) # set day_of_month to test on _delta_to_next crontab = self.crontab(minute=0, day_of_month='27-31') # last_run_at: '2023/01/28T23:00:00-08:00' - last_run_at = tz.localize(datetime(2023, 1, 28, 23, 0)) + last_run_at = datetime(2023, 1, 28, 23, 0, tzinfo=tz) # now: '2023/01/29T00:00:00-08:00' - now = tz.localize(datetime(2023, 1, 29, 0, 0)) + now = datetime(2023, 1, 29, 0, 0, tzinfo=tz) with patch_crontab_nowfun(crontab, now): due, remaining = crontab.is_due(last_run_at) diff --git a/t/unit/backends/test_mongodb.py b/t/unit/backends/test_mongodb.py index a0bb8169ea3..a3b037892a9 100644 --- a/t/unit/backends/test_mongodb.py +++ b/t/unit/backends/test_mongodb.py @@ -5,7 +5,6 @@ import dns.version import pymongo import pytest -import pytz from kombu.exceptions import EncodeError try: @@ -13,6 +12,14 @@ except ImportError: ConfigurationError = None + +import sys + +if sys.version_info >= (3, 9): + from zoneinfo import ZoneInfo +else: + from backports.zoneinfo import ZoneInfo + from celery import states, uuid from celery.backends.mongodb import Binary, InvalidDocument, MongoBackend from celery.exceptions import ImproperlyConfigured @@ -662,7 +669,7 @@ def __eq__(self, other): "serializers": ["bson", "pickle", "yaml"], }, { - "result": datetime.datetime(2000, 1, 1, 0, 0, 0, 0, tzinfo=pytz.utc), + "result": datetime.datetime(2000, 1, 1, 0, 0, 0, 0, tzinfo=ZoneInfo("UTC")), "serializers": ["pickle", "yaml"], }, # custom types diff --git a/t/unit/utils/test_serialization.py b/t/unit/utils/test_serialization.py index b5617ed2bfb..9e762d5e8af 100644 --- a/t/unit/utils/test_serialization.py +++ b/t/unit/utils/test_serialization.py @@ -5,12 +5,16 @@ from unittest.mock import Mock import pytest -import pytz from kombu import Queue from celery.utils.serialization import (STRTOBOOL_DEFAULT_TABLE, UnpickleableExceptionWrapper, ensure_serializable, get_pickleable_etype, jsonify, strtobool) +if sys.version_info >= (3, 9): + from zoneinfo import ZoneInfo +else: + from backports.zoneinfo import ZoneInfo + class test_AAPickle: @@ -64,7 +68,7 @@ class test_jsonify: ['foo', 'bar', 'baz'], {'foo': 'bar'}, datetime.utcnow(), - datetime.utcnow().replace(tzinfo=pytz.utc), + datetime.utcnow().replace(tzinfo=ZoneInfo("UTC")), datetime.utcnow().replace(microsecond=0), date(2012, 1, 1), time(hour=1, minute=30), diff --git a/t/unit/utils/test_time.py b/t/unit/utils/test_time.py index 98758c4d471..9841f364c5a 100644 --- a/t/unit/utils/test_time.py +++ b/t/unit/utils/test_time.py @@ -1,9 +1,15 @@ -from datetime import datetime, timedelta, tzinfo +import sys +from datetime import datetime, timedelta +from datetime import timezone as _timezone +from datetime import tzinfo from unittest.mock import Mock, patch import pytest -import pytz -from pytz import AmbiguousTimeError + +if sys.version_info >= (3, 9): + from zoneinfo import ZoneInfo +else: + from backports.zoneinfo import ZoneInfo from celery.utils.iso8601 import parse_iso8601 from celery.utils.time import (LocalTimezone, delta_resolution, ffwd, get_exponential_backoff_interval, @@ -42,19 +48,21 @@ def test_daylight(self, patching): class test_iso8601: def test_parse_with_timezone(self): - d = datetime.utcnow().replace(tzinfo=pytz.utc) + d = datetime.utcnow().replace(tzinfo=ZoneInfo("UTC")) assert parse_iso8601(d.isoformat()) == d # 2013-06-07T20:12:51.775877+00:00 iso = d.isoformat() 
iso1 = iso.replace('+00:00', '-01:00') d1 = parse_iso8601(iso1) - assert d1.tzinfo._minutes == -60 + d1_offset_in_minutes = d1.utcoffset().total_seconds() / 60 + assert d1_offset_in_minutes == -60 iso2 = iso.replace('+00:00', '+01:00') d2 = parse_iso8601(iso2) - assert d2.tzinfo._minutes == +60 + d2_offset_in_minutes = d2.utcoffset().total_seconds() / 60 + assert d2_offset_in_minutes == +60 iso3 = iso.replace('+00:00', 'Z') d3 = parse_iso8601(iso3) - assert d3.tzinfo == pytz.UTC + assert d3.tzinfo == _timezone.utc @pytest.mark.parametrize('delta,expected', [ @@ -109,14 +117,14 @@ def test_remaining(): """ The upcoming cases check whether the next run is calculated correctly """ - eastern_tz = pytz.timezone("US/Eastern") - tokyo_tz = pytz.timezone("Asia/Tokyo") + eastern_tz = ZoneInfo("US/Eastern") + tokyo_tz = ZoneInfo("Asia/Tokyo") # Case 1: `start` in UTC and `now` in other timezone - start = datetime.now(pytz.utc) + start = datetime.now(ZoneInfo("UTC")) now = datetime.now(eastern_tz) delta = timedelta(hours=1) - assert str(start.tzinfo) == str(pytz.utc) + assert str(start.tzinfo) == str(ZoneInfo("UTC")) assert str(now.tzinfo) == str(eastern_tz) rem_secs = remaining(start, delta, now).total_seconds() # assert remaining time is approximately equal to delta @@ -138,11 +146,18 @@ def test_remaining(): start (i.e. there is not an hour diff due to DST). In 2019, DST starts on March 10 """ - start = eastern_tz.localize(datetime(month=3, day=9, year=2019, hour=10, minute=0)) # EST - now = eastern_tz.localize(datetime(day=11, month=3, year=2019, hour=1, minute=0)) # EDT - delta = ffwd(hour=10, year=2019, microsecond=0, minute=0, second=0, day=11, weeks=0, month=3) + start = datetime( + month=3, day=9, year=2019, hour=10, + minute=0, tzinfo=eastern_tz) # EST + + now = datetime( + day=11, month=3, year=2019, hour=1, + minute=0, tzinfo=eastern_tz) # EDT + delta = ffwd(hour=10, year=2019, microsecond=0, minute=0, + second=0, day=11, weeks=0, month=3) # `next_actual_time` is the next time to run (derived from delta) - next_actual_time = eastern_tz.localize(datetime(day=11, month=3, year=2019, hour=10, minute=0)) # EDT + next_actual_time = datetime( + day=11, month=3, year=2019, hour=10, minute=0, tzinfo=eastern_tz) # EDT assert start.tzname() == "EST" assert now.tzname() == "EDT" assert next_actual_time.tzname() == "EDT" @@ -153,7 +168,7 @@ def test_remaining(): class test_timezone: - def test_get_timezone_with_pytz(self): + def test_get_timezone_with_zoneinfo(self): assert timezone.get_timezone('UTC') def test_tz_or_local(self): @@ -172,103 +187,81 @@ def test_to_local_fallback(self): class test_make_aware: - def test_tz_without_localize(self): + def test_standard_tz(self): tz = tzinfo() - assert not hasattr(tz, 'localize') wtz = make_aware(datetime.utcnow(), tz) assert wtz.tzinfo == tz - def test_when_has_localize(self): - - class tzz(tzinfo): - raises = False - - def localize(self, dt, is_dst=None): - self.localized = True - if self.raises and is_dst is None: - self.raised = True - raise AmbiguousTimeError() - return 1 # needed by min() in Python 3 (None not hashable) - - tz = tzz() - make_aware(datetime.utcnow(), tz) - assert tz.localized - - tz2 = tzz() - tz2.raises = True - make_aware(datetime.utcnow(), tz2) - assert tz2.localized - assert tz2.raised + def test_tz_when_zoneinfo(self): + tz = ZoneInfo('US/Eastern') + wtz = make_aware(datetime.utcnow(), tz) + assert wtz.tzinfo == tz def test_maybe_make_aware(self): aware = datetime.utcnow().replace(tzinfo=timezone.utc) assert 
maybe_make_aware(aware) naive = datetime.utcnow() assert maybe_make_aware(naive) - assert maybe_make_aware(naive).tzinfo is pytz.utc + assert maybe_make_aware(naive).tzinfo is ZoneInfo("UTC") - tz = pytz.timezone('US/Eastern') + tz = ZoneInfo('US/Eastern') eastern = datetime.utcnow().replace(tzinfo=tz) assert maybe_make_aware(eastern).tzinfo is tz utcnow = datetime.utcnow() - assert maybe_make_aware(utcnow, 'UTC').tzinfo is pytz.utc + assert maybe_make_aware(utcnow, 'UTC').tzinfo is ZoneInfo("UTC") class test_localize: - def test_tz_without_normalize(self): + def test_standard_tz(self): class tzz(tzinfo): def utcoffset(self, dt): return None # Mock no utcoffset specified tz = tzz() - assert not hasattr(tz, 'normalize') assert localize(make_aware(datetime.utcnow(), tz), tz) - def test_when_has_normalize(self): + @patch('dateutil.tz.datetime_ambiguous') + def test_when_zoneinfo(self, datetime_ambiguous_mock): + datetime_ambiguous_mock.return_value = False + tz = ZoneInfo("US/Eastern") + assert localize(make_aware(datetime.utcnow(), tz), tz) + + datetime_ambiguous_mock.return_value = True + tz2 = ZoneInfo("US/Eastern") + assert localize(make_aware(datetime.utcnow(), tz2), tz2) + @patch('dateutil.tz.datetime_ambiguous') + def test_when_is_ambiguous(self, datetime_ambiguous_mock): class tzz(tzinfo): - raises = None def utcoffset(self, dt): - return None + return None # Mock no utcoffset specified - def normalize(self, dt, **kwargs): - self.normalized = True - if self.raises and kwargs and kwargs.get('is_dst') is None: - self.raised = True - raise self.raises - return 1 # needed by min() in Python 3 (None not hashable) + def is_ambiguous(self, dt): + return True + datetime_ambiguous_mock.return_value = False tz = tzz() - localize(make_aware(datetime.utcnow(), tz), tz) - assert tz.normalized + assert localize(make_aware(datetime.utcnow(), tz), tz) + datetime_ambiguous_mock.return_value = True tz2 = tzz() - tz2.raises = AmbiguousTimeError() - localize(make_aware(datetime.utcnow(), tz2), tz2) - assert tz2.normalized - assert tz2.raised - - tz3 = tzz() - tz3.raises = TypeError() - localize(make_aware(datetime.utcnow(), tz3), tz3) - assert tz3.normalized - assert tz3.raised + assert localize(make_aware(datetime.utcnow(), tz2), tz2) def test_localize_changes_utc_dt(self): - now_utc_time = datetime.now(tz=pytz.utc) - local_tz = pytz.timezone('US/Eastern') + now_utc_time = datetime.now(tz=ZoneInfo("UTC")) + local_tz = ZoneInfo('US/Eastern') localized_time = localize(now_utc_time, local_tz) assert localized_time == now_utc_time def test_localize_aware_dt_idempotent(self): t = (2017, 4, 23, 21, 36, 59, 0) - local_zone = pytz.timezone('America/New_York') + local_zone = ZoneInfo('America/New_York') local_time = datetime(*t) local_time_aware = datetime(*t, tzinfo=local_zone) - alternate_zone = pytz.timezone('America/Detroit') + alternate_zone = ZoneInfo('America/Detroit') localized_time = localize(local_time_aware, alternate_zone) assert localized_time == local_time_aware assert local_zone.utcoffset( @@ -356,5 +349,6 @@ def test_negative_values(self): def test_valid_random_range(self, rr): rr.return_value = 0 maximum = 100 - get_exponential_backoff_interval(factor=40, retries=10, maximum=maximum, full_jitter=True) + get_exponential_backoff_interval( + factor=40, retries=10, maximum=maximum, full_jitter=True) rr.assert_called_once_with(maximum + 1) From e6856add120d988b7495cefe91a58f977baf6f1f Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 
10 Apr 2023 17:30:48 +0000 Subject: [PATCH 0351/1051] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/mirrors-mypy: v1.1.1 → v1.2.0](https://github.com/pre-commit/mirrors-mypy/compare/v1.1.1...v1.2.0) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index bcf37dbaec7..00b916b6f96 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -29,7 +29,7 @@ repos: - id: isort - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.1.1 + rev: v1.2.0 hooks: - id: mypy pass_filenames: false From 808b406c57cbcfe6798baa83f83e82f39c944b04 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Tue, 11 Apr 2023 16:29:31 +0600 Subject: [PATCH 0352/1051] Update dev.txt --- requirements/dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/dev.txt b/requirements/dev.txt index 5ac972cef53..441d81a3230 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -2,4 +2,4 @@ git+https://github.com/celery/py-amqp.git git+https://github.com/celery/kombu.git git+https://github.com/celery/billiard.git vine>=5.0.0 -isort==5.11.4 +isort==5.12.0 From fe891a6c79f4eb476e6d588a39fb686f05f85ac5 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Tue, 11 Apr 2023 17:07:13 +0600 Subject: [PATCH 0353/1051] Update test.txt (#8193) --- requirements/test.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/test.txt b/requirements/test.txt index aa5c85f3633..46ad19801e3 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,13 +1,13 @@ -pytest==7.2.2 +pytest==7.3.0 pytest-celery==0.0.0 pytest-subtests==0.10.0 pytest-timeout==2.1.0 pytest-click==1.1.0 pytest-order==1.1.0 -boto3==1.26.104 +boto3==1.26.110 moto==4.1.6 # typing extensions -mypy==1.1.1; platform_python_implementation=="CPython" +mypy==1.2.0; platform_python_implementation=="CPython" pre-commit==2.21.0 -r extras/yaml.txt -r extras/msgpack.txt From 393e7294554f76505f8bd82f367357fb50f23f1c Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Tue, 11 Apr 2023 17:37:44 +0600 Subject: [PATCH 0354/1051] Update test-integration.txt (#8194) --- requirements/test-integration.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test-integration.txt b/requirements/test-integration.txt index 545143cf174..50f5fdd9dcf 100644 --- a/requirements/test-integration.txt +++ b/requirements/test-integration.txt @@ -2,5 +2,5 @@ -r extras/azureblockblob.txt -r extras/auth.txt -r extras/memcache.txt -pytest-rerunfailures>=6.0 +pytest-rerunfailures>=11.1.2 git+https://github.com/celery/kombu.git From 16ee5e8923dd37730844aed89237a01013df8fdc Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Tue, 11 Apr 2023 17:39:32 +0600 Subject: [PATCH 0355/1051] Update zstd.txt (#8195) --- requirements/extras/zstd.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/zstd.txt b/requirements/extras/zstd.txt index f702f7f0bda..0236020bc1f 100644 --- a/requirements/extras/zstd.txt +++ b/requirements/extras/zstd.txt @@ -1 +1 @@ -zstandard==0.19.0 +zstandard==0.20.0 From 34c72699f4cab47b6a27f4ba44cd3ebc39dd2ce6 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Tue, 11 Apr 2023 17:41:11 +0600 Subject: [PATCH 0356/1051] Update s3.txt (#8196) --- requirements/extras/s3.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/requirements/extras/s3.txt b/requirements/extras/s3.txt index 6d8caec075f..2dadf569710 100644 --- a/requirements/extras/s3.txt +++ b/requirements/extras/s3.txt @@ -1 +1 @@ -boto3>=1.9.125 +boto3==1.26.110 From 96fa072038d78852431d594d2479fcbd6f3c4048 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Tue, 11 Apr 2023 17:41:57 +0600 Subject: [PATCH 0357/1051] Update msgpack.txt (#8199) --- requirements/extras/msgpack.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/msgpack.txt b/requirements/extras/msgpack.txt index f912067dd44..e0ee0a59187 100644 --- a/requirements/extras/msgpack.txt +++ b/requirements/extras/msgpack.txt @@ -1 +1 @@ -msgpack==1.0.4 +msgpack==1.0.5 From ea5f18d8ecea2167526001a673b73672d8ff4120 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Tue, 11 Apr 2023 17:42:50 +0600 Subject: [PATCH 0358/1051] Update solar.txt (#8198) --- requirements/extras/solar.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/solar.txt b/requirements/extras/solar.txt index e77d1f1752a..2b7a44d1864 100644 --- a/requirements/extras/solar.txt +++ b/requirements/extras/solar.txt @@ -1 +1 @@ -ephem~=4.1.3; platform_python_implementation!="PyPy" +ephem==4.1.4; platform_python_implementation!="PyPy" From f28047ac05f2445acf0626419bfa53b0df089f38 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Fri, 14 Apr 2023 15:00:19 +0300 Subject: [PATCH 0359/1051] Add Semgrep CI (#8201) Co-authored-by: semgrep.dev on behalf of @Nusnus --- .github/workflows/semgrep.yml | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 .github/workflows/semgrep.yml diff --git a/.github/workflows/semgrep.yml b/.github/workflows/semgrep.yml new file mode 100644 index 00000000000..88d6d45d5a4 --- /dev/null +++ b/.github/workflows/semgrep.yml @@ -0,0 +1,23 @@ +on: + pull_request: {} + push: + branches: + - main + - master + paths: + - .github/workflows/semgrep.yml + schedule: + # random HH:MM to avoid a load spike on GitHub Actions at 00:00 + - cron: 44 6 * * * +name: Semgrep +jobs: + semgrep: + name: Scan + runs-on: ubuntu-20.04 + env: + SEMGREP_APP_TOKEN: ${{ secrets.SEMGREP_APP_TOKEN }} + container: + image: returntocorp/semgrep + steps: + - uses: actions/checkout@v3 + - run: semgrep ci From cd0a30b506e8330dbf1bf3c583397beb886b85ff Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Fri, 14 Apr 2023 15:35:47 +0300 Subject: [PATCH 0360/1051] Added semgrep to README.rst (#8202) https://github.com/orgs/celery/teams/core-developers/discussions/41 --- README.rst | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/README.rst b/README.rst index fe84259088f..a26230e61ec 100644 --- a/README.rst +++ b/README.rst @@ -1,6 +1,6 @@ .. image:: https://docs.celeryq.dev/en/latest/_images/celery-banner-small.png -|build-status| |coverage| |license| |wheel| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge| +|build-status| |coverage| |license| |wheel| |semgrep| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge| :Version: 5.3.0b2 (dawn-chorus) :Web: https://docs.celeryq.dev/en/stable/index.html @@ -519,6 +519,10 @@ file in the top distribution directory for the full license text. :alt: Celery can be installed via wheel :target: https://pypi.org/project/celery/ +.. |semgrep| image:: https://img.shields.io/badge/semgrep-security-green.svg + :alt: Semgrep security + :target: https://go.semgrep.dev/home + .. |pyversion| image:: https://img.shields.io/pypi/pyversions/celery.svg :alt: Supported Python versions. 
:target: https://pypi.org/project/celery/ From 852b1bb3b33a957f35ea96bfd69b7e10f0a473d6 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Tue, 18 Apr 2023 10:59:17 +0600 Subject: [PATCH 0361/1051] Update django.txt (#8197) --- requirements/extras/django.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/django.txt b/requirements/extras/django.txt index e97c9bd1ecd..c37fbd16511 100644 --- a/requirements/extras/django.txt +++ b/requirements/extras/django.txt @@ -1 +1 @@ -Django>=1.11 +Django>=2.2.28 From bb17cf08987350fd5a96bfc07f3eae916731ec70 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Tue, 18 Apr 2023 11:00:59 +0600 Subject: [PATCH 0362/1051] Update redis.txt 4.3.6 (#8161) --- requirements/extras/redis.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/redis.txt b/requirements/extras/redis.txt index a37493948d2..e186f2e9e9f 100644 --- a/requirements/extras/redis.txt +++ b/requirements/extras/redis.txt @@ -1 +1 @@ -redis>=4.2.2,<4.4.0 +redis>=4.3.6,<4.4.0 From 0e92577eb1b5358c3bd4ebc7a5e880508481f981 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Tue, 18 Apr 2023 11:36:47 +0600 Subject: [PATCH 0363/1051] start removing codecov from pypi (#8206) --- requirements/test-ci-base.txt | 1 - 1 file changed, 1 deletion(-) diff --git a/requirements/test-ci-base.txt b/requirements/test-ci-base.txt index 2bca034397a..194beedf31f 100644 --- a/requirements/test-ci-base.txt +++ b/requirements/test-ci-base.txt @@ -1,6 +1,5 @@ pytest-cov==4.0.0 pytest-github-actions-annotate-failures==0.1.8 -codecov==2.1.12 -r extras/redis.txt -r extras/sqlalchemy.txt -r extras/pymemcache.txt From 7e44abdf7800a89c8872d32d2c39b62f94ea3689 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Tue, 18 Apr 2023 12:17:17 +0600 Subject: [PATCH 0364/1051] Update test.txt dependencies (#8205) * Update test.txt dependencies * Update requirements/test.txt --- requirements/test.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/test.txt b/requirements/test.txt index 46ad19801e3..43d369dc942 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,11 +1,11 @@ -pytest==7.3.0 +pytest==7.3.1 pytest-celery==0.0.0 pytest-subtests==0.10.0 pytest-timeout==2.1.0 pytest-click==1.1.0 pytest-order==1.1.0 -boto3==1.26.110 -moto==4.1.6 +boto3>=1.26.114 +moto==4.1.7 # typing extensions mypy==1.2.0; platform_python_implementation=="CPython" pre-commit==2.21.0 From 65da1cfe7b35cda9836f97205ec8d31b5e459e57 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 19 Apr 2023 15:29:18 +0300 Subject: [PATCH 0365/1051] Added link to relevant backend config in docs for worker_deduplicate_successful_tasks (#8209) --- docs/userguide/configuration.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst index 79f621cce4f..c3f60abe0ac 100644 --- a/docs/userguide/configuration.rst +++ b/docs/userguide/configuration.rst @@ -2924,8 +2924,8 @@ received the task. This cache can be made persistent by setting the :setting:`worker_state_db` setting. -If the result backend is not persistent (the RPC backend, for example), -this setting is ignored. +If the result backend is not `persistent `_ +(the RPC backend, for example), this setting is ignored. .. 
_conf-concurrency: From 4bcac7a074c6aa1b23fcf0b89e79261735ddad0e Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 19 Apr 2023 16:25:17 +0300 Subject: [PATCH 0366/1051] Renamed revoked_headers to revoked_stamps (#8210) --- celery/worker/control.py | 2 +- celery/worker/request.py | 16 ++++++++-------- celery/worker/state.py | 4 ++-- t/integration/test_tasks.py | 4 ++-- t/unit/worker/test_control.py | 10 +++++----- t/unit/worker/test_state.py | 2 +- 6 files changed, 19 insertions(+), 19 deletions(-) diff --git a/celery/worker/control.py b/celery/worker/control.py index 6676fe71033..2a3e195eeff 100644 --- a/celery/worker/control.py +++ b/celery/worker/control.py @@ -171,7 +171,7 @@ def revoke_by_stamped_headers(state, headers, terminate=False, signal=None, **kw if isinstance(headers, list): headers = {h.split('=')[0]: h.split('=')[1] for h in headers} - worker_state.revoked_headers.update(headers) + worker_state.revoked_stamps.update(headers) if not terminate: return ok(f'headers {headers} flagged as revoked') diff --git a/celery/worker/request.py b/celery/worker/request.py index ff8020a6f0f..98a33bca102 100644 --- a/celery/worker/request.py +++ b/celery/worker/request.py @@ -61,7 +61,7 @@ def __optimize__(): task_accepted = state.task_accepted task_ready = state.task_ready revoked_tasks = state.revoked -revoked_headers = state.revoked_headers +revoked_stamps = state.revoked_stamps class Request: @@ -469,20 +469,20 @@ def revoked(self): revoked_by_header, revoking_header = False, None if not revoked_by_id and self.stamped_headers: - for header in self.stamped_headers: - if header in revoked_headers: - revoked_header = revoked_headers[header] - stamped_header = self._message.headers['stamps'][header] + for stamp in self.stamped_headers: + if stamp in revoked_stamps: + revoked_header = revoked_stamps[stamp] + stamped_header = self._message.headers['stamps'][stamp] if isinstance(stamped_header, (list, tuple)): for stamped_value in stamped_header: if stamped_value in maybe_list(revoked_header): revoked_by_header = True - revoking_header = {header: stamped_value} + revoking_header = {stamp: stamped_value} break else: - revoked_by_header = stamped_header in revoked_headers[header] - revoking_header = {header: stamped_header} + revoked_by_header = stamped_header in revoked_stamps[stamp] + revoking_header = {stamp: stamped_header} break if any((expired, revoked_by_id, revoked_by_header)): diff --git a/celery/worker/state.py b/celery/worker/state.py index 1c7ab3942fa..8c70bbd9806 100644 --- a/celery/worker/state.py +++ b/celery/worker/state.py @@ -68,7 +68,7 @@ revoked = LimitedSet(maxlen=REVOKES_MAX, expires=REVOKE_EXPIRES) #: Mapping of stamped headers flagged for revoking. 
-revoked_headers = {} +revoked_stamps = {} should_stop = None should_terminate = None @@ -82,7 +82,7 @@ def reset_state(): total_count.clear() all_total_count[:] = [0] revoked.clear() - revoked_headers.clear() + revoked_stamps.clear() def maybe_shutdown(): diff --git a/t/integration/test_tasks.py b/t/integration/test_tasks.py index 3a2432114e2..5c5f6541286 100644 --- a/t/integration/test_tasks.py +++ b/t/integration/test_tasks.py @@ -234,7 +234,7 @@ def on_signature(self, sig, **headers) -> dict: assert result.ready() is True assert result.failed() is False assert result.successful() is True - worker_state.revoked_headers.clear() + worker_state.revoked_stamps.clear() # Try to purge the queue after we're done # to attempt to avoid interference to other tests @@ -294,7 +294,7 @@ def on_signature(self, sig, **headers) -> dict: assert result.ready() is True assert result.failed() is False assert result.successful() is False - worker_state.revoked_headers.clear() + worker_state.revoked_stamps.clear() # Try to purge the queue after we're done # to attempt to avoid interference to other tests diff --git a/t/unit/worker/test_control.py b/t/unit/worker/test_control.py index 10c964cab39..d3afa66c03a 100644 --- a/t/unit/worker/test_control.py +++ b/t/unit/worker/test_control.py @@ -17,7 +17,7 @@ from celery.worker import state as worker_state from celery.worker.pidbox import Pidbox, gPidbox from celery.worker.request import Request -from celery.worker.state import REVOKE_EXPIRES, revoked, revoked_headers +from celery.worker.state import REVOKE_EXPIRES, revoked, revoked_stamps hostname = socket.gethostname() @@ -554,7 +554,7 @@ def test_revoke_by_stamped_headers_terminate(self): worker_state.task_reserved(request) try: r = control.revoke_by_stamped_headers(state, stamped_header, terminate=True) - assert stamped_header == revoked_headers + assert stamped_header == revoked_stamps assert 'terminate:' in r['ok'] # unknown task id only revokes r = control.revoke_by_stamped_headers(state, stamped_header, terminate=True) @@ -602,12 +602,12 @@ def test_revoke_by_stamped_headers(self, header_to_revoke): state = self.create_state() state.consumer = Mock() # Revoke by header - revoked_headers.clear() + revoked_stamps.clear() r = control.revoke_by_stamped_headers(state, header_to_revoke, terminate=True) # Check all of the requests were revoked by a single header assert all([id in r['ok'] for id in ids]), "All requests should be revoked" - assert revoked_headers == header_to_revoke - revoked_headers.clear() + assert revoked_stamps == header_to_revoke + revoked_stamps.clear() def test_revoke_return_value_terminate_true(self): header_to_revoke = {'foo': 'bar'} diff --git a/t/unit/worker/test_state.py b/t/unit/worker/test_state.py index cf67aa25957..d020f631829 100644 --- a/t/unit/worker/test_state.py +++ b/t/unit/worker/test_state.py @@ -19,7 +19,7 @@ def reset_state(): yield state.active_requests.clear() state.revoked.clear() - state.revoked_headers.clear() + state.revoked_stamps.clear() state.total_count.clear() From 3b1c768c39f641a70d3086bd5a1079a421f94344 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 1 May 2023 17:30:35 +0000 Subject: [PATCH 0367/1051] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/asottile/pyupgrade: v3.3.1 → v3.3.2](https://github.com/asottile/pyupgrade/compare/v3.3.1...v3.3.2) --- .pre-commit-config.yaml | 2 +- 1 file 
changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 00b916b6f96..0276146c1cd 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/asottile/pyupgrade - rev: v3.3.1 + rev: v3.3.2 hooks: - id: pyupgrade args: ["--py37-plus"] From 21c40c66ee6310290b71f1a6bcd8f532384c1649 Mon Sep 17 00:00:00 2001 From: Isaac To Date: Thu, 4 May 2023 21:52:40 -0700 Subject: [PATCH 0368/1051] Ensure argument for `map` is JSON serializable --- docs/userguide/canvas.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/userguide/canvas.rst b/docs/userguide/canvas.rst index 32042054758..8264f531fa4 100644 --- a/docs/userguide/canvas.rst +++ b/docs/userguide/canvas.rst @@ -1037,7 +1037,7 @@ For example using ``map``: >>> from proj.tasks import add - >>> ~tsum.map([range(10), range(100)]) + >>> ~tsum.map([list(range(10)), list(range(100))]) [45, 4950] is the same as having a task doing: From e3c0bbac17f17ecfec945906c538c5b0e67e591e Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 8 May 2023 17:33:52 +0000 Subject: [PATCH 0369/1051] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/asottile/pyupgrade: v3.3.2 → v3.4.0](https://github.com/asottile/pyupgrade/compare/v3.3.2...v3.4.0) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 0276146c1cd..de983ddcdf2 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/asottile/pyupgrade - rev: v3.3.2 + rev: v3.4.0 hooks: - id: pyupgrade args: ["--py37-plus"] From 496e06e2776bec7e21dc631fead6a91e5c766f9c Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 10 May 2023 16:48:06 +0300 Subject: [PATCH 0370/1051] =?UTF-8?q?Bump=20version:=205.3.0b2=20=E2=86=92?= =?UTF-8?q?=205.3.0rc1?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.cfg | 2 +- README.rst | 2 +- celery/__init__.py | 2 +- docs/includes/introduction.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 144713a9d3f..874bd6d88c7 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 5.3.0b2 +current_version = 5.3.0rc1 commit = True tag = True parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?P<releaselevel>[a-z\d]+)?
diff --git a/README.rst b/README.rst index a26230e61ec..952f684a772 100644 --- a/README.rst +++ b/README.rst @@ -2,7 +2,7 @@ |build-status| |coverage| |license| |wheel| |semgrep| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge| -:Version: 5.3.0b2 (dawn-chorus) +:Version: 5.3.0rc1 (dawn-chorus) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ diff --git a/celery/__init__.py b/celery/__init__.py index 16c16d85b1d..32bb3c56572 100644 --- a/celery/__init__.py +++ b/celery/__init__.py @@ -17,7 +17,7 @@ SERIES = 'dawn-chorus' -__version__ = '5.3.0b2' +__version__ = '5.3.0rc1' __author__ = 'Ask Solem' __contact__ = 'auvipy@gmail.com' __homepage__ = 'https://docs.celeryq.dev/' diff --git a/docs/includes/introduction.txt b/docs/includes/introduction.txt index 66d4ea1b592..bb2643ac0b2 100644 --- a/docs/includes/introduction.txt +++ b/docs/includes/introduction.txt @@ -1,4 +1,4 @@ -:Version: 5.3.0b2 (dawn-chorus) +:Version: 5.3.0rc1 (dawn-chorus) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ From b4d23f290713ebea25ab517d9f980ae542885577 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 11 May 2023 19:37:16 +0300 Subject: [PATCH 0371/1051] Changelog hotfix (#8245) * Added changelog for v5.3.0b2 * Added changelog for v5.3.0rc1 * Removed [pre-commit.ci] logs --- Changelog.rst | 202 +++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 199 insertions(+), 3 deletions(-) diff --git a/Changelog.rst b/Changelog.rst index edb2a51b1e5..72095626d5b 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -8,6 +8,205 @@ This document contains change notes for bugfix & new features in the main branch & 5.2.x series, please see :ref:`whatsnew-5.2` for an overview of what's new in Celery 5.2. +.. _version-5.3.0rc1: + +5.3.0rc1 +======= + +:release-date: 2023-05-11 4:24 P.M GMT+2 +:release-by: Tomer Nosrati + +- fix functiom name by @cuishuang in #8087 +- Update CELERY_TASK_EAGER setting in user guide by @thebalaa in #8085 +- Stamping documentation fixes & cleanups by @Nusnus in #8092 +- switch to maintained pyro5 by @auvipy in #8093 +- udate dependencies of tests by @auvipy in #8095 +- cryptography==39.0.1 by @auvipy in #8096 +- Annotate celery/security/certificate.py by @Kludex in #7398 +- Deprecate parse_iso8601 in favor of fromisoformat by @stumpylog in #8098 +- pytest==7.2.2 by @auvipy in #8106 +- Type annotations for celery/utils/text.py by @max-muoto in #8107 +- Update web framework URLs by @sblondon in #8112 +- Fix contribution URL by @sblondon in #8111 +- Trying to clarify CERT_REQUIRED by @pamelafox in #8113 +- Fix potential AttributeError on 'stamps' by @Darkheir in #8115 +- Type annotations for celery/apps/beat.py by @max-muoto in #8108 +- Fixed bug where retrying a task loses its stamps by @Nusnus in #8120 +- Type hints for celery/schedules.py by @max-muoto in #8114 +- Reference Gopher Celery in README by @marselester in #8131 +- Update sqlalchemy.txt by @auvipy in #8136 +- azure-storage-blob 12.15.0 by @auvipy in #8137 +- test kombu 5.3.0b3 by @auvipy in #8138 +- fix: add expire string parse. by @Bidaya0 in #8134 +- Fix worker crash on un-pickleable exceptions by @youtux in #8133 +- CLI help output: avoid text rewrapping by click by @woutdenolf in #8152 +- Warn when an unnamed periodic task override another one. 
by @iurisilvio in #8143 +- Fix Task.handle_ignore not wrapping exceptions properly by @youtux in #8149 +- Hotfix for (#8120) - Stamping bug with retry by @Nusnus in #8158 +- Fix integration test by @youtux in #8156 +- Fixed bug in revoke_by_stamped_headers where impl did not match doc by @Nusnus in #8162 +- Align revoke and revoke_by_stamped_headers return values (terminate=True) by @Nusnus in #8163 +- Update & simplify GHA pip caching by @stumpylog in #8164 +- Update auth.txt by @auvipy in #8167 +- Update test.txt versions by @auvipy in #8173 +- remove extra = from test.txt by @auvipy in #8179 +- Update sqs.txt kombu[sqs]>=5.3.0b3 by @auvipy in #8174 +- Added signal triggered before fork by @jaroslawporada in #8177 +- Update documentation on SQLAlchemy by @max-muoto in #8188 +- Deprecate pytz and use zoneinfo by @max-muoto in #8159 +- Update dev.txt by @auvipy in #8192 +- Update test.txt by @auvipy in #8193 +- Update test-integration.txt by @auvipy in #8194 +- Update zstd.txt by @auvipy in #8195 +- Update s3.txt by @auvipy in #8196 +- Update msgpack.txt by @auvipy in #8199 +- Update solar.txt by @auvipy in #8198 +- Add Semgrep to CI by @Nusnus in #8201 +- Added semgrep to README.rst by @Nusnus in #8202 +- Update django.txt by @auvipy in #8197 +- Update redis.txt 4.3.6 by @auvipy in #8161 +- start removing codecov from pypi by @auvipy in #8206 +- Update test.txt dependencies by @auvipy in #8205 +- Improved doc for: worker_deduplicate_successful_tasks by @Nusnus in #8209 +- Renamed revoked_headers to revoked_stamps by @Nusnus in #8210 +- Ensure argument for map is JSON serializable by @candleindark in #8229 + +.. _version-5.3.0b2: + +5.3.0b2 +======= + +:release-date: 2023-02-19 1:47 P.M GMT+2 +:release-by: Asif Saif Uddin + +- BLM-2: Adding unit tests to chord clone by @Nusnus in #7668 +- Fix unknown task error typo by @dcecile in #7675 +- rename redis integration test class so that tests are executed by @wochinge in #7684 +- Check certificate/private key type when loading them by @qrmt in #7680 +- Added integration test_chord_header_id_duplicated_on_rabbitmq_msg_duplication() by @Nusnus in #7692 +- New feature flag: allow_error_cb_on_chord_header - allowing setting an error callback on chord header by @Nusnus in #7712 +- Update README.rst sorting Python/Celery versions by @andrebr in #7714 +- Fixed a bug where stamping a chord body would not use the correct stamping method by @Nusnus in #7722 +- Fixed doc duplication typo for Signature.stamp() by @Nusnus in #7725 +- Fix issue 7726: variable used in finally block may not be instantiated by @woutdenolf in #7727 +- Fixed bug in chord stamping with another chord as a body + unit test by @Nusnus in #7730 +- Use "describe_table" not "create_table" to check for existence of DynamoDB table by @maxfirman in #7734 +- Enhancements for task_allow_error_cb_on_chord_header tests and docs by @Nusnus in #7744 +- Improved custom stamping visitor documentation by @Nusnus in #7745 +- Improved the coverage of test_chord_stamping_body_chord() by @Nusnus in #7748 +- billiard >= 3.6.3.0,<5.0 for rpm by @auvipy in #7764 +- Fixed memory leak with ETA tasks at connection error when worker_cancel_long_running_tasks_on_connection_loss is enabled by @Nusnus in #7771 +- Fixed bug where a chord with header of type tuple was not supported in the link_error flow for task_allow_error_cb_on_chord_header flag by @Nusnus in #7772 +- Scheduled weekly dependency update for week 38 by @pyup-bot in #7767 +- recreate_module: set spec to the new module by @skshetry in #7773 +- 
Override integration test config using integration-tests-config.json by @thedrow in #7778 +- Fixed error handling bugs due to upgrade to a newer version of billiard by @Nusnus in #7781 +- Do not recommend using easy_install anymore by @jugmac00 in #7789 +- GitHub Workflows security hardening by @sashashura in #7768 +- Update ambiguous acks_late doc by @Zhong-z in #7728 +- billiard >=4.0.2,<5.0 by @auvipy in #7720 +- importlib_metadata remove deprecated entry point interfaces by @woutdenolf in #7785 +- Scheduled weekly dependency update for week 41 by @pyup-bot in #7798 +- pyzmq>=22.3.0 by @auvipy in #7497 +- Remove amqp from the BACKEND_ALISES list by @Kludex in #7805 +- Replace print by logger.debug by @Kludex in #7809 +- Ignore coverage on except ImportError by @Kludex in #7812 +- Add mongodb dependencies to test.txt by @Kludex in #7810 +- Fix grammar typos on the whole project by @Kludex in #7815 +- Remove isatty wrapper function by @Kludex in #7814 +- Remove unused variable _range by @Kludex in #7813 +- Add type annotation on concurrency/threads.py by @Kludex in #7808 +- Fix linter workflow by @Kludex in #7816 +- Scheduled weekly dependency update for week 42 by @pyup-bot in #7821 +- Remove .cookiecutterrc by @Kludex in #7830 +- Remove .coveragerc file by @Kludex in #7826 +- kombu>=5.3.0b2 by @auvipy in #7834 +- Fix readthedocs build failure by @woutdenolf in #7835 +- Fixed bug in group, chord, chain stamp() method, where the visitor overrides the previously stamps in tasks of these objects by @Nusnus in #7825 +- Stabilized test_mutable_errback_called_by_chord_from_group_fail_multiple by @Nusnus in #7837 +- Use SPDX license expression in project metadata by @RazerM in #7845 +- New control command revoke_by_stamped_headers by @Nusnus in #7838 +- Clarify wording in Redis priority docs by @strugee in #7853 +- Fix non working example of using celery_worker pytest fixture by @paradox-lab in #7857 +- Removed the mandatory requirement to include stamped_headers key when implementing on_signature() by @Nusnus in #7856 +- Update serializer docs by @sondrelg in #7858 +- Remove reference to old Python version by @Kludex in #7829 +- Added on_replace() to Task to allow manipulating the replaced sig with custom changes at the end of the task.replace() by @Nusnus in #7860 +- Add clarifying information to completed_count documentation by @hankehly in #7873 +- Stabilized test_revoked_by_headers_complex_canvas by @Nusnus in #7877 +- StampingVisitor will visit the callbacks and errbacks of the signature by @Nusnus in #7867 +- Fix "rm: no operand" error in clean-pyc script by @hankehly in #7878 +- Add --skip-checks flag to bypass django core checks by @mudetz in #7859 +- Scheduled weekly dependency update for week 44 by @pyup-bot in #7868 +- Added two new unit tests to callback stamping by @Nusnus in #7882 +- Sphinx extension: use inspect.signature to make it Python 3.11 compatible by @mathiasertl in #7879 +- cryptography==38.0.3 by @auvipy in #7886 +- Canvas.py doc enhancement by @Nusnus in #7889 +- Fix typo by @sondrelg in #7890 +- fix typos in optional tests by @hsk17 in #7876 +- Canvas.py doc enhancement by @Nusnus in #7891 +- Fix revoke by headers tests stability by @Nusnus in #7892 +- feat: add global keyprefix for backend result keys by @kaustavb12 in #7620 +- Canvas.py doc enhancement by @Nusnus in #7897 +- fix(sec): upgrade sqlalchemy to 1.2.18 by @chncaption in #7899 +- Canvas.py doc enhancement by @Nusnus in #7902 +- Fix test warnings by @ShaheedHaque in #7906 +- Support for out-of-tree worker 
pool implementations by @ShaheedHaque in #7880 +- Canvas.py doc enhancement by @Nusnus in #7907 +- Use bound task in base task example. Closes #7909 by @WilliamDEdwards in #7910 +- Allow the stamping visitor itself to set the stamp value type instead of casting it to a list by @Nusnus in #7914 +- Stamping a task left the task properties dirty by @Nusnus in #7916 +- Fixed bug when chaining a chord with a group by @Nusnus in #7919 +- Fixed bug in the stamping visitor mechanism where the request was lacking the stamps in the 'stamps' property by @Nusnus in #7928 +- Fixed bug in task_accepted() where the request was not added to the requests but only to the active_requests by @Nusnus in #7929 +- Fix bug in TraceInfo._log_error() where the real exception obj was hiding behind 'ExceptionWithTraceback' by @Nusnus in #7930 +- Added integration test: test_all_tasks_of_canvas_are_stamped() by @Nusnus in #7931 +- Added new example for the stamping mechanism: examples/stamping by @Nusnus in #7933 +- Fixed a bug where replacing a stamped task and stamping it again by @Nusnus in #7934 +- Bugfix for nested group stamping on task replace by @Nusnus in #7935 +- Added integration test test_stamping_example_canvas() by @Nusnus in #7937 +- Fixed a bug in losing chain links when unchaining an inner chain with links by @Nusnus in #7938 +- Removing as not mandatory by @auvipy in #7885 +- Housekeeping for Canvas.py by @Nusnus in #7942 +- Scheduled weekly dependency update for week 50 by @pyup-bot in #7954 +- try pypy 3.9 in CI by @auvipy in #7956 +- sqlalchemy==1.4.45 by @auvipy in #7943 +- billiard>=4.1.0,<5.0 by @auvipy in #7957 +- feat(typecheck): allow changing type check behavior on the app level; by @moaddib666 in #7952 +- Add broker_channel_error_retry option by @nkns165 in #7951 +- Add beat_cron_starting_deadline_seconds to prevent unwanted cron runs by @abs25 in #7945 +- Scheduled weekly dependency update for week 51 by @pyup-bot in #7965 +- Added doc to "retry_errors" newly supported field of "publish_retry_policy" of the task namespace by @Nusnus in #7967 +- Renamed from master to main in the docs and the CI workflows by @Nusnus in #7968 +- Fix docs for the exchange to use with worker_direct by @alessio-b2c2 in #7973 +- Pin redis==4.3.4 by @auvipy in #7974 +- return list of nodes to make sphinx extension compatible with Sphinx 6.0 by @mathiasertl in #7978 +- use version range redis>=4.2.2,<4.4.0 by @auvipy in #7980 +- Scheduled weekly dependency update for week 01 by @pyup-bot in #7987 +- Add annotations to minimise differences with celery-aio-pool's tracer.py. by @ShaheedHaque in #7925 +- Fixed bug where linking a stamped task did not add the stamp to the link's options by @Nusnus in #7992 +- sqlalchemy==1.4.46 by @auvipy in #7995 +- pytz by @auvipy in #8002 +- Fix few typos, provide configuration + workflow for codespell to catch any new by @yarikoptic in #8023 +- RabbitMQ links update by @arnisjuraga in #8031 +- Ignore files generated by tests by @Kludex in #7846 +- Revert "sqlalchemy==1.4.46 (#7995)" by @Nusnus in #8033 +- Fixed bug with replacing a stamped task with a chain or a group (inc. 
links/errlinks) by @Nusnus in #8034 +- Fixed formatting in setup.cfg that caused flake8 to misbehave by @Nusnus in #8044 +- Removed duplicated import Iterable by @Nusnus in #8046 +- Fix docs by @Nusnus in #8047 +- Document --logfile default by @strugee in #8057 +- Stamping Mechanism Refactoring by @Nusnus in #8045 +- result_backend_thread_safe config shares backend across threads by @CharlieTruong in #8058 +- Fix cronjob that use day of month and negative UTC timezone by @pkyosx in #8053 +- Stamping Mechanism Examples Refactoring by @Nusnus in #8060 +- Fixed bug in Task.on_stamp_replaced() by @Nusnus in #8061 +- Stamping Mechanism Refactoring 2 by @Nusnus in #8064 +- Changed default append_stamps from True to False (meaning duplicates … by @Nusnus in #8068 +- typo in comment: mailicious => malicious by @yanick in #8072 +- Fix command for starting flower with specified broker URL by @ShukantPal in #8071 +- Improve documentation on ETA/countdown tasks (#8069) by @norbertcyran in #8075 + .. _version-5.3.0b1: 5.3.0b1 @@ -27,7 +226,6 @@ an overview of what's new in Celery 5.2. - Only clear the cache if there are no active writers. - Billiard 4.0.1 - .. _version-5.3.0a1: 5.3.0a1 @@ -74,8 +272,6 @@ an overview of what's new in Celery 5.2. - test kombu>=5.3.0a1,<6.0 (#7598). - Canvas Header Stamping (#7384). - - .. _version-5.2.7: 5.2.7 From 53f2191dc67a61574c723770d73bb2f4f6ddc399 Mon Sep 17 00:00:00 2001 From: woutdenolf Date: Fri, 12 May 2023 12:48:21 +0200 Subject: [PATCH 0372/1051] add missing dependency --- requirements/default.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/requirements/default.txt b/requirements/default.txt index 4678436d793..57fe1b5c950 100644 --- a/requirements/default.txt +++ b/requirements/default.txt @@ -7,4 +7,5 @@ click-repl>=0.2.0 click-plugins>=1.1.1 importlib-metadata>=3.6; python_version < '3.8' backports.zoneinfo>=0.2.1; python_version < '3.9' -tzdata>=2022.7 \ No newline at end of file +tzdata>=2022.7 +python-dateutil>=2.8.2 \ No newline at end of file From 1baca0ca90b5bd7f38e3d2ae2d513f24cc0613ea Mon Sep 17 00:00:00 2001 From: Stevie Gayet <87695919+stegayet@users.noreply.github.com> Date: Sun, 14 May 2023 17:03:59 +0200 Subject: [PATCH 0373/1051] chore(build): clean `setup.py` (#8248) * chore(build): remove `cmdclass` parameter * chore(build): remove deprecated `zip_safe` parameter * chore(build): remove `include_package_data` parameter --------- Co-authored-by: Stevie Gayet --- setup.py | 20 -------------------- 1 file changed, 20 deletions(-) diff --git a/setup.py b/setup.py index 8000d5b3c42..480ed33d2f2 100755 --- a/setup.py +++ b/setup.py @@ -2,7 +2,6 @@ import codecs import os import re -import sys import setuptools import setuptools.command.test @@ -132,22 +131,6 @@ def long_description(): except OSError: return 'Long description error: Missing README.rst file' -# -*- Command: setup.py test -*- - - -class pytest(setuptools.command.test.test): - user_options = [('pytest-args=', 'a', 'Arguments to pass to pytest')] - - def initialize_options(self): - super().initialize_options() - self.pytest_args = [] - - def run_tests(self): - import pytest as _pytest - sys.exit(_pytest.main(self.pytest_args)) - -# -*- %%% -*- - meta = parse_dist_meta() setuptools.setup( @@ -166,9 +149,6 @@ def run_tests(self): python_requires=">=3.7", tests_require=reqs('test.txt'), extras_require=extras_require(), - cmdclass={'test': pytest}, - include_package_data=True, - zip_safe=False, entry_points={ 'console_scripts': [ 'celery = celery.__main__:main', 
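Note on the ``python-dateutil`` requirement added above: it backs the pytz-to-zoneinfo migration visible in the earlier ``t/unit/utils/test_time.py`` hunks, where the reworked ``localize`` tests patch ``dateutil.tz.datetime_ambiguous`` in place of pytz's ``AmbiguousTimeError``. A minimal sketch of the new pattern, not taken from these patches — the example date, the ``fold`` choice, and the ``backports.zoneinfo`` fallback (per ``requirements/default.txt``) are illustrative:

.. code-block:: python

    from datetime import datetime

    from dateutil import tz as dateutil_tz

    try:
        from zoneinfo import ZoneInfo  # Python 3.9+
    except ImportError:
        from backports.zoneinfo import ZoneInfo  # older interpreters

    eastern = ZoneInfo("US/Eastern")

    # pytz style (removed): eastern.localize(datetime(2019, 11, 3, 1, 30))
    # zoneinfo style: attach tzinfo directly; no localize()/normalize() step.
    wall = datetime(2019, 11, 3, 1, 30, tzinfo=eastern)

    # 01:30 occurs twice on 2019-11-03 (DST fall-back), so it is ambiguous;
    # the PEP 495 fold attribute selects which occurrence is meant.
    if dateutil_tz.datetime_ambiguous(wall):
        wall = wall.replace(fold=1)  # second occurrence: EST, UTC-05:00

    print(wall.utcoffset())  # timedelta equal to -05:00 when fold=1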
From eb4173db7b530706284827c8c3a41636551d53ff Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 15 May 2023 17:28:03 +0000 Subject: [PATCH 0374/1051] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/mirrors-mypy: v1.2.0 → v1.3.0](https://github.com/pre-commit/mirrors-mypy/compare/v1.2.0...v1.3.0) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index de983ddcdf2..6464f96e8f3 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -29,7 +29,7 @@ repos: - id: isort - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.2.0 + rev: v1.3.0 hooks: - id: mypy pass_filenames: false From 87e46299255e4d63c6366eaab50560d7bff505c2 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Thu, 18 May 2023 12:31:52 +0600 Subject: [PATCH 0375/1051] Update python-package.yml to drop python 3.7 from CI --- .github/workflows/python-package.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index ab135fefc7f..a8b9f963d37 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -29,7 +29,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ['3.7', '3.8', '3.9', '3.10', 'pypy-3.9', 'pypy-3.8'] + python-version: ['3.8', '3.9', '3.10', 'pypy-3.9', 'pypy-3.8'] os: ["ubuntu-latest", "windows-latest"] exclude: - python-version: 'pypy-3.9' @@ -74,7 +74,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ['3.7', '3.8', '3.9', '3.10'] + python-version: ['3.8', '3.9', '3.10'] toxenv: ['redis', 'rabbitmq', 'rabbitmq_redis'] services: From e7b47a62d789557cf18ed0e56e2dfb99a51a62f7 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Thu, 18 May 2023 12:45:20 +0600 Subject: [PATCH 0376/1051] Update test-ci-base.txt --- requirements/test-ci-base.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test-ci-base.txt b/requirements/test-ci-base.txt index 194beedf31f..72be056e56d 100644 --- a/requirements/test-ci-base.txt +++ b/requirements/test-ci-base.txt @@ -1,5 +1,5 @@ pytest-cov==4.0.0 -pytest-github-actions-annotate-failures==0.1.8 +pytest-github-actions-annotate-failures==0.2.0 -r extras/redis.txt -r extras/sqlalchemy.txt -r extras/pymemcache.txt From e2985d2c4277eb870c3ddf684bac8103ee574fe1 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Tue, 23 May 2023 20:48:38 +0600 Subject: [PATCH 0377/1051] Update test.txt dependencies (#8263) --- requirements/test.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/test.txt b/requirements/test.txt index 43d369dc942..c9b99a88e0f 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,13 +1,13 @@ pytest==7.3.1 pytest-celery==0.0.0 -pytest-subtests==0.10.0 +pytest-subtests==0.11.0 pytest-timeout==2.1.0 pytest-click==1.1.0 pytest-order==1.1.0 boto3>=1.26.114 -moto==4.1.7 +moto==4.1.10 # typing extensions -mypy==1.2.0; platform_python_implementation=="CPython" +mypy==1.3.0; platform_python_implementation=="CPython" pre-commit==2.21.0 -r extras/yaml.txt -r extras/msgpack.txt From 1eee438df66000de4ceeb9f95756b33baa7f6bf2 Mon Sep 17 00:00:00 2001 From: Bartosz Nowotny Date: Mon, 22 May 2023 14:01:21 +0200 Subject: [PATCH 0378/1051] Fix exc_type being the 
exception instance rather than the exception type --- celery/app/trace.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/celery/app/trace.py b/celery/app/trace.py index df949ce2cdb..59bcb5182c0 100644 --- a/celery/app/trace.py +++ b/celery/app/trace.py @@ -222,7 +222,7 @@ def handle_failure(self, task, req, store_errors=True, call_errbacks=True): # a traceback. _, _, exc.__traceback__ = sys.exc_info() - exc_type = get_pickleable_etype(orig_exc) + exc_type = get_pickleable_etype(type(orig_exc)) # make sure we only send pickleable exceptions back to parent. einfo = ExceptionInfo(exc_info=(exc_type, exc, exc.__traceback__)) From 2a28aa38e8cddbf4d5fcff22fd927f6e0231be26 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Wed, 24 May 2023 18:48:54 +0600 Subject: [PATCH 0379/1051] revert to pyro.txt 4 for the time being --- requirements/extras/pyro.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/pyro.txt b/requirements/extras/pyro.txt index bb73cdd74f2..d19b0db3892 100644 --- a/requirements/extras/pyro.txt +++ b/requirements/extras/pyro.txt @@ -1 +1 @@ -pyro5 +pyro4 From a1aecb7a78c034ecd4c56f245c533a6220c3366d Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Wed, 24 May 2023 20:08:45 +0600 Subject: [PATCH 0380/1051] Update auth.txt cryptography==40.0.2 --- requirements/extras/auth.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/auth.txt b/requirements/extras/auth.txt index bb6e5788554..6e51e89542f 100644 --- a/requirements/extras/auth.txt +++ b/requirements/extras/auth.txt @@ -1 +1 @@ -cryptography==40.0.1 +cryptography==40.0.2 From c72e5d6d96529fa2ee1b259ee598079ad4952156 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Wed, 24 May 2023 20:11:22 +0600 Subject: [PATCH 0381/1051] Update s3.txt boto3=>1.26.139 --- requirements/extras/s3.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/s3.txt b/requirements/extras/s3.txt index 2dadf569710..3ab5a7bd1df 100644 --- a/requirements/extras/s3.txt +++ b/requirements/extras/s3.txt @@ -1 +1 @@ -boto3==1.26.110 +boto3=>1.26.139 From bcdf294047d6b438f2ee77fd7c085923061c2d61 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Wed, 24 May 2023 20:13:18 +0600 Subject: [PATCH 0382/1051] Update zstd.txt 0.21.0 --- requirements/extras/zstd.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/zstd.txt b/requirements/extras/zstd.txt index 0236020bc1f..d7c173723ed 100644 --- a/requirements/extras/zstd.txt +++ b/requirements/extras/zstd.txt @@ -1 +1 @@ -zstandard==0.20.0 +zstandard==0.21.0 From c6b54074514b14b5b7b3d8e6a4885fc1699a3e39 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Wed, 24 May 2023 20:19:59 +0600 Subject: [PATCH 0383/1051] Update sqlalchemy.txt sqlalchemy>=1.4.48,<2.0 --- requirements/extras/sqlalchemy.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/sqlalchemy.txt b/requirements/extras/sqlalchemy.txt index 1191b6925c4..4e6c56a2d6c 100644 --- a/requirements/extras/sqlalchemy.txt +++ b/requirements/extras/sqlalchemy.txt @@ -1 +1 @@ -sqlalchemy>=1.4.47,<2.0 +sqlalchemy>=1.4.48,<2.0 From 12a2d7a7b3ec33a63578e2d3cf34d45d7da2b831 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Wed, 24 May 2023 21:19:04 +0600 Subject: [PATCH 0384/1051] Update s3.txt boto3>=1.26.139 typo --- requirements/extras/s3.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/s3.txt b/requirements/extras/s3.txt 
index 3ab5a7bd1df..9697544c76b 100644 --- a/requirements/extras/s3.txt +++ b/requirements/extras/s3.txt @@ -1 +1 @@ -boto3=>1.26.139 +boto3>=1.26.139 From fe1b4228527c150a2097e3610cff6ccebc3063ea Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Wed, 24 May 2023 21:35:46 +0600 Subject: [PATCH 0385/1051] Update default.txt to 5.3.0rc1 --- requirements/default.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/default.txt b/requirements/default.txt index 57fe1b5c950..0c25f442bb0 100644 --- a/requirements/default.txt +++ b/requirements/default.txt @@ -1,5 +1,5 @@ billiard>=4.1.0,<5.0 -kombu>=5.3.0b3,<6.0 +kombu>=5.3.0rc1,<6.0 vine>=5.0.0,<6.0 click>=8.1.2,<9.0 click-didyoumean>=0.3.0 @@ -8,4 +8,4 @@ click-plugins>=1.1.1 importlib-metadata>=3.6; python_version < '3.8' backports.zoneinfo>=0.2.1; python_version < '3.9' tzdata>=2022.7 -python-dateutil>=2.8.2 \ No newline at end of file +python-dateutil>=2.8.2 From 51d4fc83dfd8882326031f27911fead3f0b5e624 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Fri, 26 May 2023 20:04:30 +0300 Subject: [PATCH 0386/1051] Fixed revoking tasks by stamped headers (#8269) * Fixed bug in Request.stamps and Request.stamped_headers where None/List types were not handled correctly * Changed revoke_by_stamped_headers terminate flow and return value for improved readability on the revoked task * Fixed bug where worker_state.revoked_stamps update in revoke_by_stamped_headers did not handle None/List types correctly * Fixed bug where Request.revoked() did not handle None/List types correctly * Doc + Cleanup + Lint fixes + fine-tuning the bugfixes * Fixed test_revoke_by_stamped_headers_terminate() * Fixed test_revoke_by_stamped_headers() + small bugfix found by the fixed test * Fixed test_revoke_return_value_terminate_true() * Fixed test_revoked_by_headers_simple_canvas() + improved test doc * Added test flow for terminate=False in test_revoke_by_stamped_headers_terminate() to improve code coverage * Code coverage found untested bugged code -> this fixes the bug and improves testing coverage --- celery/worker/control.py | 69 +++++++++++++++++------------------ celery/worker/request.py | 10 +++-- t/integration/test_tasks.py | 8 +++- t/unit/worker/test_control.py | 31 ++++++++++------ t/unit/worker/test_request.py | 29 ++++++++++++++- 5 files changed, 96 insertions(+), 51 deletions(-) diff --git a/celery/worker/control.py b/celery/worker/control.py index 2a3e195eeff..41d059e4116 100644 --- a/celery/worker/control.py +++ b/celery/worker/control.py @@ -1,13 +1,12 @@ """Worker remote control command implementations.""" import io import tempfile -import warnings -from collections import UserDict, namedtuple +from collections import UserDict, defaultdict, namedtuple from billiard.common import TERM_SIGNAME from kombu.utils.encoding import safe_repr -from celery.exceptions import CeleryWarning, WorkerShutdown +from celery.exceptions import WorkerShutdown from celery.platforms import signals as _signals from celery.utils.functional import maybe_list from celery.utils.log import get_logger @@ -161,54 +160,54 @@ def revoke_by_stamped_headers(state, headers, terminate=False, signal=None, **kw """Revoke task by header (or list of headers). Keyword Arguments: + headers(dictionary): Dictionary that contains stamping scheme name as keys and stamps as values. + If headers is a list, it will be converted to a dictionary. terminate (bool): Also terminate the process if the task is active. 
signal (str): Name of signal to use for terminate (e.g., ``KILL``). + Sample headers input: + {'mtask_id': [id1, id2, id3]} """ # pylint: disable=redefined-outer-name # XXX Note that this redefines `terminate`: # Outside of this scope that is a function. # supports list argument since 3.1 + signum = _signals.signum(signal or TERM_SIGNAME) + if isinstance(headers, list): headers = {h.split('=')[0]: h.split('=')[1] for h in headers} - worker_state.revoked_stamps.update(headers) + for header, stamps in headers.items(): + updated_stamps = maybe_list(worker_state.revoked_stamps.get(header) or []) + list(maybe_list(stamps)) + worker_state.revoked_stamps[header] = updated_stamps if not terminate: - return ok(f'headers {headers} flagged as revoked') + return ok(f'headers {headers} flagged as revoked, but not terminated') - task_ids = set() active_requests = list(worker_state.active_requests) - # Terminate all running tasks of matching headers - if active_requests: - warnings.warn( - "Terminating tasks by headers does not scale well when worker concurrency is high", - CeleryWarning - ) - - # Go through all active requests, and check if one of the - # requests has a stamped header that matches the given headers to revoke - - req: Request - for req in active_requests: - # Check stamps exist - if req.stamped_headers and req.stamps: - # if so, check if any of the stamped headers match the given headers - for expected_header_key, expected_header_value in headers.items(): - if expected_header_key in req.stamps: - actual_header = req.stamps[expected_header_key] - # Check any possible match regardless if the stamps are a sequence or not - if any([ - header in maybe_list(expected_header_value) - for header in maybe_list(actual_header) - ]): - task_ids.add(req.task_id) - continue + terminated_scheme_to_stamps_mapping = defaultdict(set) - task_ids = _revoke(state, task_ids, terminate, signal, **kwargs) - if isinstance(task_ids, dict): - return task_ids - return ok(list(task_ids)) + # Terminate all running tasks of matching headers + # Go through all active requests, and check if one of the + # requests has a stamped header that matches the given headers to revoke + + for req in active_requests: + # Check stamps exist + if hasattr(req, "stamps") and req.stamps: + # if so, check if any stamps match a revoked stamp + for expected_header_key, expected_header_value in headers.items(): + if expected_header_key in req.stamps: + expected_header_value = maybe_list(expected_header_value) + actual_header = maybe_list(req.stamps[expected_header_key]) + matching_stamps_for_request = set(actual_header) & set(expected_header_value) + # Check any possible match regardless if the stamps are a sequence or not + if matching_stamps_for_request: + terminated_scheme_to_stamps_mapping[expected_header_key].update(matching_stamps_for_request) + req.terminate(state.consumer.pool, signal=signum) + + if not terminated_scheme_to_stamps_mapping: + return ok(f'headers {headers} were not terminated') + return ok(f'headers {terminated_scheme_to_stamps_mapping} revoked') def _revoke(state, task_ids, terminate=False, signal=None, **kwargs): diff --git a/celery/worker/request.py b/celery/worker/request.py index 98a33bca102..5d7c93a467c 100644 --- a/celery/worker/request.py +++ b/celery/worker/request.py @@ -323,11 +323,12 @@ def groups(self): @property def stamped_headers(self) -> list: - return self._request_dict.get('stamped_headers', []) + return self._request_dict.get('stamped_headers') or [] @property def stamps(self) -> dict: - return 
{header: self._request_dict['stamps'][header] for header in self.stamped_headers} + stamps = self._request_dict.get('stamps') or {} + return {header: stamps.get(header) for header in self.stamped_headers} @property def correlation_id(self): @@ -481,7 +482,10 @@ def revoked(self): revoking_header = {stamp: stamped_value} break else: - revoked_by_header = stamped_header in revoked_stamps[stamp] + revoked_by_header = any([ + stamped_header in maybe_list(revoked_header), + stamped_header == revoked_header, # When the header is a single set value + ]) revoking_header = {stamp: stamped_header} break diff --git a/t/integration/test_tasks.py b/t/integration/test_tasks.py index 5c5f6541286..31f6659e722 100644 --- a/t/integration/test_tasks.py +++ b/t/integration/test_tasks.py @@ -234,7 +234,13 @@ def on_signature(self, sig, **headers) -> dict: assert result.ready() is True assert result.failed() is False assert result.successful() is True - worker_state.revoked_stamps.clear() + + # Clear the set of revoked stamps in the worker state. + # This step is performed in each iteration of the loop to ensure that only tasks + # stamped with a specific monitoring ID will be revoked. + # For subsequent iterations with different monitoring IDs, the revoked stamps will + # not match the task's stamps, allowing those tasks to proceed successfully. + worker_state.revoked_stamps.clear() # Try to purge the queue after we're done # to attempt to avoid interference to other tests diff --git a/t/unit/worker/test_control.py b/t/unit/worker/test_control.py index d3afa66c03a..df1c8c4c04b 100644 --- a/t/unit/worker/test_control.py +++ b/t/unit/worker/test_control.py @@ -11,6 +11,7 @@ from kombu.utils.uuid import uuid from celery.utils.collections import AttributeDict +from celery.utils.functional import maybe_list from celery.utils.timer2 import Timer from celery.worker import WorkController as _WC from celery.worker import consumer, control @@ -544,7 +545,10 @@ def test_revoke_terminate(self): finally: worker_state.task_ready(request) - def test_revoke_by_stamped_headers_terminate(self): + @pytest.mark.parametrize( + "terminate", [True, False], + ) + def test_revoke_by_stamped_headers_terminate(self, terminate): request = Mock() request.id = uuid() request.options = stamped_header = {'stamp': 'foo'} @@ -553,12 +557,12 @@ def test_revoke_by_stamped_headers_terminate(self): state.consumer = Mock() worker_state.task_reserved(request) try: - r = control.revoke_by_stamped_headers(state, stamped_header, terminate=True) - assert stamped_header == revoked_stamps - assert 'terminate:' in r['ok'] - # unknown task id only revokes - r = control.revoke_by_stamped_headers(state, stamped_header, terminate=True) - assert 'tasks unknown' in r['ok'] + worker_state.revoked_stamps.clear() + assert stamped_header.keys() != revoked_stamps.keys() + control.revoke_by_stamped_headers(state, stamped_header, terminate=terminate) + assert stamped_header.keys() == revoked_stamps.keys() + for key in stamped_header.keys(): + assert maybe_list(stamped_header[key]) == revoked_stamps[key] finally: worker_state.task_ready(request) @@ -605,8 +609,13 @@ def test_revoke_by_stamped_headers(self, header_to_revoke): revoked_stamps.clear() r = control.revoke_by_stamped_headers(state, header_to_revoke, terminate=True) # Check all of the requests were revoked by a single header - assert all([id in r['ok'] for id in ids]), "All requests should be revoked" - assert revoked_stamps == header_to_revoke + for header, stamp in header_to_revoke.items(): + assert header in 
r['ok'] + for s in maybe_list(stamp): + assert str(s) in r['ok'] + assert header_to_revoke.keys() == revoked_stamps.keys() + for key in header_to_revoke.keys(): + assert list(maybe_list(header_to_revoke[key])) == revoked_stamps[key] revoked_stamps.clear() def test_revoke_return_value_terminate_true(self): @@ -630,9 +639,9 @@ def test_revoke_return_value_terminate_true(self): worker_state.task_reserved(request) state = self.create_state() state.consumer = Mock() - r = control.revoke(state, headers["id"], terminate=True) r_headers = control.revoke_by_stamped_headers(state, header_to_revoke, terminate=True) - assert r["ok"] == r_headers["ok"] + # revoke & revoke_by_stamped_headers are not aligned anymore in their return values + assert "{'foo': {'bar'}}" in r_headers["ok"] def test_autoscale(self): self.panel.state.consumer = Mock() diff --git a/t/unit/worker/test_request.py b/t/unit/worker/test_request.py index bd63561f0cc..342e7092b1a 100644 --- a/t/unit/worker/test_request.py +++ b/t/unit/worker/test_request.py @@ -21,7 +21,7 @@ from celery.worker import strategy from celery.worker.request import Request, create_request_cls from celery.worker.request import logger as req_logger -from celery.worker.state import revoked +from celery.worker.state import revoked, revoked_stamps class RequestCase: @@ -579,6 +579,33 @@ def test_revoked(self): assert job._already_revoked assert job.acknowledged + @pytest.mark.parametrize( + "header_to_revoke", + [ + {'header_A': 'value_1'}, + {'header_B': ['value_2', 'value_3']}, + {'header_C': ('value_2', 'value_3')}, + {'header_D': {'value_2', 'value_3'}}, + {'header_E': [1, '2', 3.0]}, + ], + ) + def test_revoked_by_stamped_headers(self, header_to_revoke): + revoked_stamps.clear() + job = self.xRequest() + stamps = header_to_revoke + stamped_headers = list(header_to_revoke.keys()) + job._message.headers['stamps'] = stamps + job._message.headers['stamped_headers'] = stamped_headers + job._request_dict['stamps'] = stamps + job._request_dict['stamped_headers'] = stamped_headers + with self.assert_signal_called( + task_revoked, sender=job.task, request=job._context, + terminated=False, expired=False, signum=None): + revoked_stamps.update(stamps) + assert job.revoked() + assert job._already_revoked + assert job.acknowledged + def test_execute_does_not_execute_revoked(self): job = self.xRequest() revoked.add(job.id) From c2dd40816405495d03c853cf9e2a650134cb62e4 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Tue, 30 May 2023 00:36:09 +0600 Subject: [PATCH 0387/1051] start adding sqla v 2.0 compatibility (#8050) * start adding sqla v 2.0 compatibility * Update requirements/extras/sqlalchemy.txt * Update sqlalchemy.txt * Update requirements/extras/sqlalchemy.txt * Update sqlalchemy.txt * Update requirements/extras/sqlalchemy.txt * Update requirements/extras/sqlalchemy.txt --- requirements/extras/sqlalchemy.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/sqlalchemy.txt b/requirements/extras/sqlalchemy.txt index 4e6c56a2d6c..579a9263827 100644 --- a/requirements/extras/sqlalchemy.txt +++ b/requirements/extras/sqlalchemy.txt @@ -1 +1 @@ -sqlalchemy>=1.4.48,<2.0 +sqlalchemy>=1.4.48,<2.1 \ No newline at end of file From 3346868864df500691e601d617c98f44405e1f71 Mon Sep 17 00:00:00 2001 From: danigm Date: Tue, 30 May 2023 06:24:59 +0200 Subject: [PATCH 0388/1051] Support sqlalchemy 2.0 in tests (#8271) Co-authored-by: Asif Saif Uddin --- requirements/extras/sqlalchemy.txt | 2 +- t/unit/backends/test_database.py | 7 ++++++- 2 
files changed, 7 insertions(+), 2 deletions(-) diff --git a/requirements/extras/sqlalchemy.txt b/requirements/extras/sqlalchemy.txt index 579a9263827..1e8fb62d436 100644 --- a/requirements/extras/sqlalchemy.txt +++ b/requirements/extras/sqlalchemy.txt @@ -1 +1 @@ -sqlalchemy>=1.4.48,<2.1 \ No newline at end of file +sqlalchemy>=1.4.48,<2.1 diff --git a/t/unit/backends/test_database.py b/t/unit/backends/test_database.py index 511298f9a1b..a693f383f67 100644 --- a/t/unit/backends/test_database.py +++ b/t/unit/backends/test_database.py @@ -408,7 +408,12 @@ def test_prepare_models_terminates(self, create_engine): from sqlalchemy.dialects.sqlite import dialect from sqlalchemy.exc import DatabaseError - sqlite = dialect.dbapi() + if hasattr(dialect, 'dbapi'): + # Method name in SQLAlchemy < 2.0 + sqlite = dialect.dbapi() + else: + # Newer method name in SQLAlchemy 2.0 + sqlite = dialect.import_dbapi() manager = SessionManager() engine = manager.get_engine('dburi') From d127c526b9ef9088ebe8fa2a4c9bbf5ebec6e66a Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Tue, 30 May 2023 12:20:23 +0600 Subject: [PATCH 0389/1051] Update test-ci-base.txt (#8273) --- requirements/test-ci-base.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test-ci-base.txt b/requirements/test-ci-base.txt index 72be056e56d..626cbbaf90c 100644 --- a/requirements/test-ci-base.txt +++ b/requirements/test-ci-base.txt @@ -1,4 +1,4 @@ -pytest-cov==4.0.0 +pytest-cov==4.1.0 pytest-github-actions-annotate-failures==0.2.0 -r extras/redis.txt -r extras/sqlalchemy.txt From 9ed1a6f156b819c09b0baf9ed8b133659e61f9ae Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Tue, 30 May 2023 15:08:31 +0600 Subject: [PATCH 0390/1051] Update sqs.txt kombu 5.3.0rc1 (#8274) --- requirements/extras/sqs.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/sqs.txt b/requirements/extras/sqs.txt index 3b76a17bbd0..8cb74148e8f 100644 --- a/requirements/extras/sqs.txt +++ b/requirements/extras/sqs.txt @@ -1 +1 @@ -kombu[sqs]~=5.3.0b3 +kombu[sqs]>=5.3.0rc1 From 741ea9dc87971e7a4c386436c5ac500ec6a9a0f3 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 30 May 2023 12:09:07 +0300 Subject: [PATCH 0391/1051] Fix docker (#8275) * Fixed Dockerfile build with docker compose * Updated redis and rabbitmq docker lables to latest in docker/docker-compose.yml --- docker/Dockerfile | 69 +++++++++++++++++++++++---------------- docker/docker-compose.yml | 4 +-- 2 files changed, 42 insertions(+), 31 deletions(-) diff --git a/docker/Dockerfile b/docker/Dockerfile index f7e36e957c4..66ca8a30a78 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -1,4 +1,4 @@ -FROM debian:bullseye-slim +FROM debian:bookworm-slim ENV PYTHONUNBUFFERED 1 ENV PYTHONIOENCODING UTF-8 @@ -50,14 +50,21 @@ WORKDIR $PROVISIONING # Scripts will lose their executable flags on copy. To avoid the extra instructions # we call the shell directly. #RUN sh install-couchbase.sh -COPY docker/scripts/create-linux-user.sh . -RUN sh create-linux-user.sh +RUN useradd -m -s /bin/bash $CELERY_USER # Swap to the celery user so packages and celery are not installed as root. USER $CELERY_USER -COPY docker/scripts/install-pyenv.sh . 
-RUN sh install-pyenv.sh +# Install pyenv +RUN curl https://pyenv.run | bash + +# Install required Python versions +RUN pyenv install 3.8 +RUN pyenv install 3.9 +RUN pyenv install 3.10 + +# Set global Python versions +RUN pyenv global 3.8 3.9 3.10 # Install celery WORKDIR $HOME @@ -66,45 +73,49 @@ COPY --chown=1000:1000 docker/entrypoint /entrypoint RUN chmod gu+x /entrypoint # Define the local pyenvs -RUN pyenv local python3.9 python3.8 python3.7 python3.10 +RUN pyenv local 3.8 3.9 3.10 -RUN pyenv exec python3.9 -m pip install --upgrade pip setuptools wheel && \ - pyenv exec python3.8 -m pip install --upgrade pip setuptools wheel && \ - pyenv exec python3.7 -m pip install --upgrade pip setuptools wheel && \ +RUN pyenv exec python3.8 -m pip install --upgrade pip setuptools wheel && \ + pyenv exec python3.9 -m pip install --upgrade pip setuptools wheel && \ pyenv exec python3.10 -m pip install --upgrade pip setuptools wheel +COPY --chown=1000:1000 . $HOME/celery + +RUN pyenv exec python3.8 -m pip install -e $HOME/celery && \ + pyenv exec python3.9 -m pip install -e $HOME/celery && \ + pyenv exec python3.10 -m pip install -e $HOME/celery + # Setup one celery environment for basic development use -RUN pyenv exec python3.9 -m pip install \ +RUN pyenv exec python3.8 -m pip install \ + -r requirements/default.txt \ -r requirements/dev.txt \ - -r requirements/test.txt \ - -r requirements/test-ci-default.txt \ -r requirements/docs.txt \ - -r requirements/test-integration.txt \ - -r requirements/pkgutils.txt && \ - pyenv exec python3.8 -m pip install \ - -r requirements/dev.txt \ - -r requirements/test.txt \ + -r requirements/pkgutils.txt \ + -r requirements/test-ci-base.txt \ -r requirements/test-ci-default.txt \ - -r requirements/docs.txt \ -r requirements/test-integration.txt \ - -r requirements/pkgutils.txt && \ - pyenv exec python3.7 -m pip install \ + -r requirements/test-pypy3.txt \ + -r requirements/test.txt && \ + pyenv exec python3.9 -m pip install \ + -r requirements/default.txt \ -r requirements/dev.txt \ - -r requirements/test.txt \ - -r requirements/test-ci-default.txt \ -r requirements/docs.txt \ + -r requirements/pkgutils.txt \ + -r requirements/test-ci-base.txt \ + -r requirements/test-ci-default.txt \ -r requirements/test-integration.txt \ - -r requirements/pkgutils.txt && \ + -r requirements/test-pypy3.txt \ + -r requirements/test.txt && \ pyenv exec python3.10 -m pip install \ + -r requirements/default.txt \ -r requirements/dev.txt \ - -r requirements/test.txt \ - -r requirements/test-ci-default.txt \ -r requirements/docs.txt \ + -r requirements/pkgutils.txt \ + -r requirements/test-ci-base.txt \ + -r requirements/test-ci-default.txt \ -r requirements/test-integration.txt \ - -r requirements/pkgutils.txt - - -COPY --chown=1000:1000 . 
$HOME/celery + -r requirements/test-pypy3.txt \ + -r requirements/test.txt WORKDIR $HOME/celery diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index 23256d12301..c37501f1dc0 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -27,10 +27,10 @@ services: - azurite rabbit: - image: rabbitmq:3.9 + image: rabbitmq:latest redis: - image: redis:6.2 + image: redis:latest dynamodb: image: amazon/dynamodb-local:latest From df12da6c084aa579d9ba7c98572500d5ddb0aa0b Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Wed, 31 May 2023 11:25:39 +0600 Subject: [PATCH 0392/1051] Update default.txt (#8277) --- requirements/default.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/default.txt b/requirements/default.txt index 0c25f442bb0..a5e15a19183 100644 --- a/requirements/default.txt +++ b/requirements/default.txt @@ -1,5 +1,5 @@ billiard>=4.1.0,<5.0 -kombu>=5.3.0rc1,<6.0 +kombu>=5.3.0rc2,<6.0 vine>=5.0.0,<6.0 click>=8.1.2,<9.0 click-didyoumean>=0.3.0 From 631ad8e1358b79c88513c49229e757e5a624618c Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Wed, 31 May 2023 11:26:55 +0600 Subject: [PATCH 0393/1051] Update sqs.txt --- requirements/extras/sqs.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/sqs.txt b/requirements/extras/sqs.txt index 8cb74148e8f..c523c587be5 100644 --- a/requirements/extras/sqs.txt +++ b/requirements/extras/sqs.txt @@ -1 +1 @@ -kombu[sqs]>=5.3.0rc1 +kombu[sqs]>=5.3.0rc2 From 385e81434e486ba1d33dfd21495c8d55fd67e569 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Wed, 31 May 2023 11:32:54 +0600 Subject: [PATCH 0394/1051] Update test.txt --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index c9b99a88e0f..b1b0dd9a451 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -4,7 +4,7 @@ pytest-subtests==0.11.0 pytest-timeout==2.1.0 pytest-click==1.1.0 pytest-order==1.1.0 -boto3>=1.26.114 +boto3>=1.26.143 moto==4.1.10 # typing extensions mypy==1.3.0; platform_python_implementation=="CPython" From 78a00b6c5616d519cb5fa334d0b59fd1e77294c1 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Wed, 31 May 2023 12:02:55 +0600 Subject: [PATCH 0395/1051] Update redis.txt 4.5 (#8278) --- requirements/extras/redis.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/redis.txt b/requirements/extras/redis.txt index e186f2e9e9f..16c0c206a11 100644 --- a/requirements/extras/redis.txt +++ b/requirements/extras/redis.txt @@ -1 +1 @@ -redis>=4.3.6,<4.4.0 +redis>=4.5.2 From 44a2ad2113ed080a96cef0f60e17594bbcc8c61a Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Wed, 31 May 2023 12:03:37 +0600 Subject: [PATCH 0396/1051] Update pkgutils.txt (#8279) --- requirements/pkgutils.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/pkgutils.txt b/requirements/pkgutils.txt index abe74e0ef86..652a4c801a5 100644 --- a/requirements/pkgutils.txt +++ b/requirements/pkgutils.txt @@ -2,8 +2,8 @@ setuptools>=40.8.0 wheel>=0.33.1 flake8>=3.8.3 flakeplus>=1.1 -flake8-docstrings~=1.5 -pydocstyle==6.1.1; python_version >= '3.0' +flake8-docstrings>=1.7.0 +pydocstyle==6.3.0 tox>=3.8.4 sphinx2rst>=1.0 # Disable cyanide until it's fully updated. 
From dfb661df93da55953e58af1fee9bad12ba499958 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Wed, 31 May 2023 12:53:22 +0600 Subject: [PATCH 0397/1051] remove python 3.7 from tests (#8280) * remove python 3.7 from tests * Update README.rst --- README.rst | 4 ++-- setup.py | 2 -- tox.ini | 10 ++++------ 3 files changed, 6 insertions(+), 10 deletions(-) diff --git a/README.rst b/README.rst index 952f684a772..22481e14440 100644 --- a/README.rst +++ b/README.rst @@ -60,8 +60,8 @@ What do I need? Celery version 5.3.0a1 runs on, -- Python (3.7, 3.8, 3.9, 3.10) -- PyPy3.7 (7.3.7+) +- Python (3.8, 3.9, 3.10) +- PyPy3.8+ (v7.3.11+) This is the version of celery which will support Python 3.7 or newer. diff --git a/setup.py b/setup.py index 480ed33d2f2..60edefe434b 100755 --- a/setup.py +++ b/setup.py @@ -169,8 +169,6 @@ def long_description(): "Framework :: Celery", "Programming Language :: Python", "Programming Language :: Python :: 3 :: Only", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", diff --git a/tox.ini b/tox.ini index 2820e656884..207770dda2c 100644 --- a/tox.ini +++ b/tox.ini @@ -2,8 +2,8 @@ requires = tox-gh-actions envlist = - {3.7,3.8,3.9,3.10,pypy3}-unit - {3.7,3.8,3.9,3.10,pypy3}-integration-{rabbitmq_redis,rabbitmq,redis,dynamodb,azureblockblob,cache,cassandra,elasticsearch} + {3.8,3.9,3.10,pypy3}-unit + {3.8,3.9,3.10,pypy3}-integration-{rabbitmq_redis,rabbitmq,redis,dynamodb,azureblockblob,cache,cassandra,elasticsearch} flake8 apicheck @@ -13,7 +13,6 @@ envlist = [gh-actions] python = - 3.7: 3.7-unit 3.8: 3.8-unit 3.9: 3.9-unit 3.10: 3.10-unit @@ -29,8 +28,8 @@ deps= -r{toxinidir}/requirements/test.txt -r{toxinidir}/requirements/pkgutils.txt - 3.7,3.8,3.9,3.10: -r{toxinidir}/requirements/test-ci-default.txt - 3.7,3.8,3.9,3.10: -r{toxinidir}/requirements/docs.txt + 3.8,3.9,3.10: -r{toxinidir}/requirements/test-ci-default.txt + 3.8,3.9,3.10: -r{toxinidir}/requirements/docs.txt pypy3: -r{toxinidir}/requirements/test-ci-default.txt integration: -r{toxinidir}/requirements/test-integration.txt @@ -75,7 +74,6 @@ setenv = azureblockblob: TEST_BACKEND=azureblockblob://DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1; basepython = - 3.7: python3.7 3.8: python3.8 3.9: python3.9 3.10: python3.10 From 88627727606779f96843087effe84dc7320b413f Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Wed, 31 May 2023 20:42:04 +0600 Subject: [PATCH 0398/1051] added changelog for v5.3.0rc2 --- Changelog.rst | 20 +++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/Changelog.rst b/Changelog.rst index 72095626d5b..fdf249b84b1 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -8,10 +8,28 @@ This document contains change notes for bugfix & new features in the main branch & 5.2.x series, please see :ref:`whatsnew-5.2` for an overview of what's new in Celery 5.2. + +.. _version-5.3.0rc2: + +5.3.0rc2 +======== + +:release-date: 2023-05-31 9:00 P.M GMT+6 +:release-by: Asif Saif Uddin + +- Add missing dependency. +- Fix exc_type being the exception instance rather. +- Fixed revoking tasks by stamped headers (#8269). +- Support sqlalchemy 2.0 in tests (#8271). +- Fix docker (#8275). +- Update redis.txt to 4.5 (#8278). +- Update kombu>=5.3.0rc2. + + .. 
_version-5.3.0rc1:

5.3.0rc1
-=======
+========

:release-date: 2023-05-11 4:24 P.M GMT+2
:release-by: Tomer Nosrati

From f51f805cbdfcedfd34a4e19f07f26fcc81e2c696 Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Wed, 31 May 2023 20:45:59 +0600
Subject: [PATCH 0399/1051] =?UTF-8?q?Bump=20version:=205.3.0rc1=20?=
 =?UTF-8?q?=E2=86=92=205.3.0rc2?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg               | 2 +-
 README.rst                     | 2 +-
 celery/__init__.py             | 2 +-
 docs/includes/introduction.txt | 2 +-
 4 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 874bd6d88c7..6bf1243a0e8 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 5.3.0rc1
+current_version = 5.3.0rc2
 commit = True
 tag = True
 parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P[a-z\d]+)?

diff --git a/README.rst b/README.rst
index 22481e14440..5fd1ae76ad2 100644
--- a/README.rst
+++ b/README.rst
@@ -2,7 +2,7 @@
 |build-status| |coverage| |license| |wheel| |semgrep| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge|

-:Version: 5.3.0rc1 (dawn-chorus)
+:Version: 5.3.0rc2 (dawn-chorus)
 :Web: https://docs.celeryq.dev/en/stable/index.html
 :Download: https://pypi.org/project/celery/
 :Source: https://github.com/celery/celery/

diff --git a/celery/__init__.py b/celery/__init__.py
index 32bb3c56572..ed47561e262 100644
--- a/celery/__init__.py
+++ b/celery/__init__.py
@@ -17,7 +17,7 @@
 SERIES = 'dawn-chorus'

-__version__ = '5.3.0rc1'
+__version__ = '5.3.0rc2'
 __author__ = 'Ask Solem'
 __contact__ = 'auvipy@gmail.com'
 __homepage__ = 'https://docs.celeryq.dev/'

diff --git a/docs/includes/introduction.txt b/docs/includes/introduction.txt
index bb2643ac0b2..13927847965 100644
--- a/docs/includes/introduction.txt
+++ b/docs/includes/introduction.txt
@@ -1,4 +1,4 @@
-:Version: 5.3.0rc1 (dawn-chorus)
+:Version: 5.3.0rc2 (dawn-chorus)
 :Web: https://docs.celeryq.dev/en/stable/index.html
 :Download: https://pypi.org/project/celery/
 :Source: https://github.com/celery/celery/

From 170d725710f5c4f4935177f93a247fc0fc2c2f1d Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Fri, 2 Jun 2023 06:36:46 +0600
Subject: [PATCH 0400/1051] upgrade syntax to py3.8 (#8281)

* upgrade syntax to py3.8
* [pre-commit.ci] auto fixes from pre-commit.com hooks

  for more information, see https://pre-commit.ci
* Update celery/concurrency/thread.py

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
---
 .pre-commit-config.yaml      | 2 +-
 celery/concurrency/thread.py | 7 +------
 2 files changed, 2 insertions(+), 7 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 6464f96e8f3..58aea37df77 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -3,7 +3,7 @@ repos:
     rev: v3.4.0
     hooks:
       - id: pyupgrade
-        args: ["--py37-plus"]
+        args: ["--py38-plus"]

   - repo: https://github.com/PyCQA/flake8
     rev: 6.0.0
diff --git a/celery/concurrency/thread.py b/celery/concurrency/thread.py
index b9c23e0173a..bcc7c11647c 100644
--- a/celery/concurrency/thread.py
+++ b/celery/concurrency/thread.py
@@ -9,12 +9,7 @@
 __all__ = ('TaskPool',)

 if TYPE_CHECKING:
-    import sys
-
-    if sys.version_info >= (3, 8):
-        from typing import TypedDict
-    else:
-        from typing_extensions import TypedDict
+    from typing import TypedDict

     PoolInfo = TypedDict('PoolInfo', {'max-concurrency': int, 'threads': int})

From e1571986ea4d6692bc1828f53416251bd08be4c5 Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Fri, 2
Jun 2023 06:38:50 +0600 Subject: [PATCH 0401/1051] Update setup.cfg (#8287) update deprecated settings & use updated deps --- setup.cfg | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/setup.cfg b/setup.cfg index 92cde32eb3a..bda1be9ec24 100644 --- a/setup.cfg +++ b/setup.cfg @@ -34,11 +34,11 @@ per-file-ignores = [bdist_rpm] requires = backports.zoneinfo>=0.2.1;python_version<'3.9' tzdata>=2022.7 - billiard >=4.0.2,<5.0 - kombu >= 5.2.1,<6.0.0 + billiard >=4.1.0,<5.0 + kombu >= 5.3.0rc2,<6.0.0 [bdist_wheel] universal = 0 [metadata] -license_file = LICENSE +license_files = LICENSE From ef2fcb4322cff670e7a0fc2a1d0075cb2af6829e Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Sat, 3 Jun 2023 11:26:50 +0600 Subject: [PATCH 0402/1051] Update s3.txt boto3>=1.26.143 --- requirements/extras/s3.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/s3.txt b/requirements/extras/s3.txt index 9697544c76b..981aedd4a38 100644 --- a/requirements/extras/s3.txt +++ b/requirements/extras/s3.txt @@ -1 +1 @@ -boto3>=1.26.139 +boto3>=1.26.143 From 525f90e4dafd153e7cd8cc0fb921c24a7f45eca0 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Sat, 3 Jun 2023 11:59:33 +0600 Subject: [PATCH 0403/1051] Update dynamodb.txt deps (#8291) --- requirements/extras/dynamodb.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/dynamodb.txt b/requirements/extras/dynamodb.txt index f52faa35c3a..981aedd4a38 100644 --- a/requirements/extras/dynamodb.txt +++ b/requirements/extras/dynamodb.txt @@ -1 +1 @@ -boto3>=1.22.2 +boto3>=1.26.143 From 4294bde623c22a705e51d46c9803de015c1cac39 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Sat, 3 Jun 2023 12:00:14 +0600 Subject: [PATCH 0404/1051] Update auth.txt (#8290) --- requirements/extras/auth.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/auth.txt b/requirements/extras/auth.txt index 6e51e89542f..d4a35167c7d 100644 --- a/requirements/extras/auth.txt +++ b/requirements/extras/auth.txt @@ -1 +1 @@ -cryptography==40.0.2 +cryptography==41.0.1 From f3d9e554cc372e4e7e38f2fe7ee97d292b753533 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Sat, 3 Jun 2023 12:08:52 +0600 Subject: [PATCH 0405/1051] Update librabbitmq.txt > 2.0.0 (#8292) --- requirements/extras/librabbitmq.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/librabbitmq.txt b/requirements/extras/librabbitmq.txt index 8f9a2dbca81..874e223c7a7 100644 --- a/requirements/extras/librabbitmq.txt +++ b/requirements/extras/librabbitmq.txt @@ -1 +1 @@ -librabbitmq>=1.5.0 +librabbitmq>=2.0.0 From ebd2b6b38646f75bf7a8b447ac63d4be297a2367 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Sat, 3 Jun 2023 12:26:52 +0600 Subject: [PATCH 0406/1051] Minor Update on README.rst --- README.rst | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/README.rst b/README.rst index 5fd1ae76ad2..dc4ea26499c 100644 --- a/README.rst +++ b/README.rst @@ -58,7 +58,7 @@ in such a way that the client enqueues an URL to be requested by a worker. What do I need? =============== -Celery version 5.3.0a1 runs on, +Celery version 5.3.0rc2 runs on, - Python (3.8, 3.9, 3.10) - PyPy3.8+ (v7.3.11+) @@ -69,6 +69,7 @@ This is the version of celery which will support Python 3.7 or newer. If you're running an older version of Python, you need to be running an older version of Celery: +- Python 3.7: Celery 5.2 or earlier. - Python 3.6: Celery 5.1 or earlier. 
- Python 2.7: Celery 4.x series. - Python 2.6: Celery series 3.1 or earlier. @@ -91,7 +92,7 @@ Get Started =========== If this is the first time you're trying to use Celery, or you're -new to Celery v5.3.0a1 coming from previous versions then you should read our +new to Celery v5.3.0rc2 coming from previous versions then you should read our getting started tutorials: - `First steps with Celery`_ From d34146f1cf54622fc55a764641fbc3de4feacae1 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Sun, 4 Jun 2023 11:20:52 +0600 Subject: [PATCH 0407/1051] test kombu 5.3.0 & minor doc update (#8294) * test kombu 5.3.0 & minor doc update * test kombu 5.3.0 & minor doc update --- requirements/README.rst | 7 ++----- requirements/default.txt | 2 +- requirements/extras/sqs.txt | 2 +- setup.cfg | 2 +- 4 files changed, 5 insertions(+), 8 deletions(-) diff --git a/requirements/README.rst b/requirements/README.rst index 890bb189a68..a3d718b06e7 100644 --- a/requirements/README.rst +++ b/requirements/README.rst @@ -8,11 +8,8 @@ Index * :file:`requirements/default.txt` - Default requirements for Python 3.7+. + Default requirements for Python 3.8+. -* :file:`requirements/jython.txt` - - Extra requirements needed to run on Jython 2.5 * :file:`requirements/security.txt` @@ -29,7 +26,7 @@ Index * :file:`requirements/test-ci-default.txt` - Extra test requirements required for Python 3.7 by the CI suite (Tox). + Extra test requirements required for Python 3.8 by the CI suite (Tox). * :file:`requirements/test-integration.txt` diff --git a/requirements/default.txt b/requirements/default.txt index a5e15a19183..c51039d0c73 100644 --- a/requirements/default.txt +++ b/requirements/default.txt @@ -1,5 +1,5 @@ billiard>=4.1.0,<5.0 -kombu>=5.3.0rc2,<6.0 +kombu>=5.3.0,<6.0 vine>=5.0.0,<6.0 click>=8.1.2,<9.0 click-didyoumean>=0.3.0 diff --git a/requirements/extras/sqs.txt b/requirements/extras/sqs.txt index c523c587be5..173f2cc8d7a 100644 --- a/requirements/extras/sqs.txt +++ b/requirements/extras/sqs.txt @@ -1 +1 @@ -kombu[sqs]>=5.3.0rc2 +kombu[sqs]>=5.3.0 diff --git a/setup.cfg b/setup.cfg index bda1be9ec24..fffebc3afb3 100644 --- a/setup.cfg +++ b/setup.cfg @@ -35,7 +35,7 @@ per-file-ignores = requires = backports.zoneinfo>=0.2.1;python_version<'3.9' tzdata>=2022.7 billiard >=4.1.0,<5.0 - kombu >= 5.3.0rc2,<6.0.0 + kombu >= 5.3.0,<6.0.0 [bdist_wheel] universal = 0 From 60b9945fb1c7c9eb8bedac230c715b662a3b4f54 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Tue, 6 Jun 2023 09:39:36 +0600 Subject: [PATCH 0408/1051] Update pyro.txt --- requirements/extras/pyro.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/pyro.txt b/requirements/extras/pyro.txt index d19b0db3892..bde9e2995b9 100644 --- a/requirements/extras/pyro.txt +++ b/requirements/extras/pyro.txt @@ -1 +1 @@ -pyro4 +pyro4==4.82 From 5a2ece45e777cdce251bce1e2bd4ef3cac28014e Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Tue, 6 Jun 2023 09:47:26 +0600 Subject: [PATCH 0409/1051] Update moto version in test.txt --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index b1b0dd9a451..f7fa249f3c0 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -5,7 +5,7 @@ pytest-timeout==2.1.0 pytest-click==1.1.0 pytest-order==1.1.0 boto3>=1.26.143 -moto==4.1.10 +moto>=4.1.11 # typing extensions mypy==1.3.0; platform_python_implementation=="CPython" pre-commit==2.21.0 From af0b8870ac5ad1970b0dd811b2ce99c345ec8aa0 Mon Sep 17 00:00:00 2001 
From: Asif Saif Uddin
Date: Tue, 6 Jun 2023 11:32:11 +0600
Subject: [PATCH 0410/1051] Whatsnew in Celery 5.3.0 (#8300)

* what's new in celery 5.3.0
* update
* update change highlights for 5.3
* update release code name to Emerald Rush
* update release code name to Emerald Rush
* add more major changes
* add more major changes
---
 docs/whatsnew-5.2.rst | 393 ------------------------------------------
 docs/whatsnew-5.3.rst | 349 +++++++++++++++++++++++++++++++++++++
 2 files changed, 349 insertions(+), 393 deletions(-)
 delete mode 100644 docs/whatsnew-5.2.rst
 create mode 100644 docs/whatsnew-5.3.rst

diff --git a/docs/whatsnew-5.2.rst b/docs/whatsnew-5.2.rst
deleted file mode 100644
index 3e2a8700a64..00000000000
--- a/docs/whatsnew-5.2.rst
+++ /dev/null
@@ -1,393 +0,0 @@
-.. _whatsnew-5.2:
-
-=========================================
- What's new in Celery 5.2 (Dawn Chorus)
-=========================================
-:Author: Omer Katz (``omer.drow at gmail.com``)
-
-..
note:: - - This wall was automatically generated from git history, - so sadly it doesn't not include the people who help with more important - things like answering mailing-list questions. - -Upgrading from Celery 4.x -========================= - -Step 1: Adjust your command line invocation -------------------------------------------- - -Celery 5.0 introduces a new CLI implementation which isn't completely backwards compatible. - -The global options can no longer be positioned after the sub-command. -Instead, they must be positioned as an option for the `celery` command like so:: - - celery --app path.to.app worker - -If you were using our :ref:`daemonizing` guide to deploy Celery in production, -you should revisit it for updates. - -Step 2: Update your configuration with the new setting names ------------------------------------------------------------- - -If you haven't already updated your configuration when you migrated to Celery 4.0, -please do so now. - -We elected to extend the deprecation period until 6.0 since -we did not loudly warn about using these deprecated settings. - -Please refer to the :ref:`migration guide ` for instructions. - -Step 3: Read the important notes in this document -------------------------------------------------- - -Make sure you are not affected by any of the important upgrade notes -mentioned in the :ref:`following section `. - -You should verify that none of the breaking changes in the CLI -do not affect you. Please refer to :ref:`New Command Line Interface ` for details. - -Step 4: Migrate your code to Python 3 -------------------------------------- - -Celery 5.x only supports Python 3. Therefore, you must ensure your code is -compatible with Python 3. - -If you haven't ported your code to Python 3, you must do so before upgrading. - -You can use tools like `2to3 `_ -and `pyupgrade `_ to assist you with -this effort. - -After the migration is done, run your test suite with Celery 4 to ensure -nothing has been broken. - -Step 5: Upgrade to Celery 5.2 ------------------------------ - -At this point you can upgrade your workers and clients with the new version. - -.. _v520-important: - -Important Notes -=============== - -Supported Python Versions -------------------------- - -The supported Python versions are: - -- CPython 3.7 -- CPython 3.8 -- CPython 3.9 -- PyPy3.7 7.3 (``pypy3``) - -Experimental support -~~~~~~~~~~~~~~~~~~~~ - -Celery supports these Python versions provisionally as they are not production -ready yet: - -- CPython 3.10 (currently in RC2) - -Memory Leak Fixes ------------------ - -Two severe memory leaks have been fixed in this version: - -* :class:`celery.result.ResultSet` no longer holds a circular reference to itself. -* The prefork pool no longer keeps messages in its cache forever when the master - process disconnects from the broker. - -The first memory leak occurs when you use :class:`celery.result.ResultSet`. -Each instance held a promise which provides that instance as an argument to -the promise's callable. -This caused a circular reference which kept the ResultSet instance in memory -forever since the GC couldn't evict it. -The provided argument is now a :func:`weakref.proxy` of the ResultSet's -instance. -The memory leak mainly occurs when you use :class:`celery.result.GroupResult` -since it inherits from :class:`celery.result.ResultSet` which doesn't get used -that often. - -The second memory leak exists since the inception of the project. -The prefork pool maintains a cache of the jobs it executes. 
-When they are complete, they are evicted from the cache. -However, when Celery disconnects from the broker, we flush the pool -and discard the jobs, expecting that they'll be cleared later once the worker -acknowledges them but that has never been the case. -Instead, these jobs remain forever in memory. -We now discard those jobs immediately while flushing. - -Dropped support for Python 3.6 ------------------------------- - -Celery now requires Python 3.7 and above. - -Python 3.6 will reach EOL in December, 2021. -In order to focus our efforts we have dropped support for Python 3.6 in -this version. - -If you still require to run Celery using Python 3.6 -you can still use Celery 5.1. -However we encourage you to upgrade to a supported Python version since -no further security patches will be applied for Python 3.6 after -the 23th of December, 2021. - -Tasks ------ - -When replacing a task with another task, we now give an indication of the -replacing nesting level through the ``replaced_task_nesting`` header. - -A task which was never replaced has a ``replaced_task_nesting`` value of 0. - -Kombu ------ - -Starting from v5.2, the minimum required version is Kombu 5.2.0. - -Prefork Workers Pool ---------------------- - -Now all orphaned worker processes are killed automatically when main process exits. - -Eventlet Workers Pool ---------------------- - -You can now terminate running revoked tasks while using the -Eventlet Workers Pool. - -Custom Task Classes -------------------- - -We introduced a custom handler which will be executed before the task -is started called ``before_start``. - -See :ref:`custom-task-cls-app-wide` for more details. - -Important Notes From 5.0 ------------------------- - -Dropped support for Python 2.7 & 3.5 -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Celery now requires Python 3.6 and above. - -Python 2.7 has reached EOL in January 2020. -In order to focus our efforts we have dropped support for Python 2.7 in -this version. - -In addition, Python 3.5 has reached EOL in September 2020. -Therefore, we are also dropping support for Python 3.5. - -If you still require to run Celery using Python 2.7 or Python 3.5 -you can still use Celery 4.x. -However we encourage you to upgrade to a supported Python version since -no further security patches will be applied for Python 2.7 or -Python 3.5. - -Eventlet Workers Pool -~~~~~~~~~~~~~~~~~~~~~ - -Due to `eventlet/eventlet#526 `_ -the minimum required version is eventlet 0.26.1. - -Gevent Workers Pool -~~~~~~~~~~~~~~~~~~~ - -Starting from v5.0, the minimum required version is gevent 1.0.0. - -Couchbase Result Backend -~~~~~~~~~~~~~~~~~~~~~~~~ - -The Couchbase result backend now uses the V3 Couchbase SDK. - -As a result, we no longer support Couchbase Server 5.x. - -Also, starting from v5.0, the minimum required version -for the database client is couchbase 3.0.0. - -To verify that your Couchbase Server is compatible with the V3 SDK, -please refer to their `documentation `_. - -Riak Result Backend -~~~~~~~~~~~~~~~~~~~ - -The Riak result backend has been removed as the database is no longer maintained. - -The Python client only supports Python 3.6 and below which prevents us from -supporting it and it is also unmaintained. - -If you are still using Riak, refrain from upgrading to Celery 5.0 while you -migrate your application to a different database. - -We apologize for the lack of notice in advance but we feel that the chance -you'll be affected by this breaking change is minimal which is why we -did it. 
- -AMQP Result Backend -~~~~~~~~~~~~~~~~~~~ - -The AMQP result backend has been removed as it was deprecated in version 4.0. - -Removed Deprecated Modules -~~~~~~~~~~~~~~~~~~~~~~~~~~ - -The `celery.utils.encoding` and the `celery.task` modules has been deprecated -in version 4.0 and therefore are removed in 5.0. - -If you were using the `celery.utils.encoding` module before, -you should import `kombu.utils.encoding` instead. - -If you were using the `celery.task` module before, you should import directly -from the `celery` module instead. - -`azure-servicebus` 7.0.0 is now required ----------------------------------------- - -Given the SDK changes between 0.50.0 and 7.0.0 Kombu deprecates support for -older `azure-servicebus` versions. - -.. _v520-news: - -Bug: Pymongo 3.12.1 is not compatible with Celery 5.2 -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -For now we are limiting Pymongo version, only allowing for versions between 3.3.0 and 3.12.0. - -This will be fixed in the next patch. - -News -==== - -Support for invoking chords of unregistered tasks -------------------------------------------------- - -Previously if you attempted to publish a chord -while providing a signature which wasn't registered in the Celery app publishing -the chord as the body of the chord, an :exc:`celery.exceptions.NotRegistered` -exception would be raised. - -From now on, you can publish these sort of chords and they would be executed -correctly: - -.. code-block:: python - - # movies.task.publish_movie is registered in the current app - movie_task = celery_app.signature('movies.task.publish_movie', task_id=str(uuid.uuid4()), immutable=True) - # news.task.publish_news is *not* registered in the current app - news_task = celery_app.signature('news.task.publish_news', task_id=str(uuid.uuid4()), immutable=True) - - my_chord = chain(movie_task, - group(movie_task.set(task_id=str(uuid.uuid4())), - movie_task.set(task_id=str(uuid.uuid4()))), - news_task) - my_chord.apply_async() # <-- No longer raises an exception - -Consul Result Backend ---------------------- - -We now create a new client per request to Consul to avoid a bug in the Consul -client. - -The Consul Result Backend now accepts a new -:setting:`result_backend_transport_options` key: ``one_client``. -You can opt out of this behavior by setting ``one_client`` to True. - -Please refer to the documentation of the backend if you're using the Consul -backend to find out which behavior suites you. - -Filesystem Result Backend -------------------------- - -We now cleanup expired task results while using the -filesystem result backend as most result backends do. - -ArangoDB Result Backend ------------------------ - -You can now check the validity of the CA certificate while making -a TLS connection to ArangoDB result backend. - -If you'd like to do so, set the ``verify`` key in the -:setting:`arangodb_backend_settings` dictionary to ``True``. diff --git a/docs/whatsnew-5.3.rst b/docs/whatsnew-5.3.rst new file mode 100644 index 00000000000..24ca6838ebb --- /dev/null +++ b/docs/whatsnew-5.3.rst @@ -0,0 +1,349 @@ +.. _whatsnew-5.3: + +========================================= + What's new in Celery 5.3 (Emerald Rush) +========================================= +:Author: Asif Saif Uddin (``auvipy at gmail.com``). + +.. 
sidebar:: Change history
+
+   What's new documents describe the changes in major versions,
+   we also have a :ref:`changelog` that lists the changes in bugfix
+   releases (0.0.x), while older series are archived under the :ref:`history`
+   section.
+
+Celery is a simple, flexible, and reliable distributed programming framework
+to process vast amounts of messages, while providing operations with
+the tools required to maintain a distributed system with Python.
+
+It's a task queue with focus on real-time processing, while also
+supporting task scheduling.
+
+Celery has a large and diverse community of users and contributors,
+you should come join us :ref:`on IRC `
+or :ref:`our mailing-list `.
+
+.. note::
+
+   Following the problems with Freenode, we migrated our IRC channel to Libera Chat
+   as most projects did.
+   You can also join us using `Gitter `_.
+
+   We're sometimes there to answer questions. We welcome you to join.
+
+To read more about Celery you should go read the :ref:`introduction `.
+
+While this version is **mostly** backward compatible with previous versions,
+it's important that you read the following section as this release
+is a new major version.
+
+This version is officially supported on CPython 3.8, 3.9 & 3.10
+and is also supported on PyPy3.8+.
+
+.. _`website`: https://docs.celeryq.dev/en/stable/
+
+.. topic:: Table of Contents
+
+   Make sure you read the important notes before upgrading to this version.
+
+.. contents::
+   :local:
+   :depth: 2
+
+Preface
+=======
+
+.. note::
+
+   **This release contains fixes for many long standing bugs & stability issues.
+   We encourage our users to upgrade to this release as soon as possible.**
+
+The 5.3.0 release is a new feature release for Celery.
+
+Releases in the 5.x series are codenamed after songs of `Jon Hopkins `_.
+This release has been codenamed `Emerald Rush `_.
+
+From now on we only support Python 3.8 and above.
+We will maintain compatibility with Python 3.8 until it's
+EOL in 2024.
+
+*— Asif Saif Uddin*
+
+Long Term Support Policy
+------------------------
+
+We no longer support Celery 4.x as we don't have the resources to do so.
+If you'd like to help us, all contributions are welcome.
+
+Celery 5.x **is not** an LTS release. We will support it until the release
+of Celery 6.x.
+
+We're in the process of defining our Long Term Support policy.
+Watch the next "What's New" document for updates.
+
+Wall of Contributors
+--------------------
+
+.. note::
+
+   This wall was automatically generated from git history,
+   so sadly it doesn't include the people who help with more important
+   things like answering mailing-list questions.
+
+Upgrading from Celery 4.x
+=========================
+
+Step 1: Adjust your command line invocation
+-------------------------------------------
+
+Celery 5.0 introduces a new CLI implementation which isn't completely backwards compatible.
+
+The global options can no longer be positioned after the sub-command.
+Instead, they must be positioned as an option for the `celery` command like so::
+
+    celery --app path.to.app worker
+
+If you were using our :ref:`daemonizing` guide to deploy Celery in production,
+you should revisit it for updates.
+
+Step 2: Update your configuration with the new setting names
+------------------------------------------------------------
+
+If you haven't already updated your configuration when you migrated to Celery 4.0,
+please do so now.
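A minimal sketch of what that rename looks like, assuming a hypothetical ``app`` instance; the two settings shown are only examples of the old uppercase/new lowercase pairs:

.. code-block:: python

    from celery import Celery

    app = Celery('example')

    # Old-style uppercase names, typically read from a config module:
    #   CELERY_RESULT_BACKEND = 'redis://localhost:6379/0'
    #   CELERY_TASK_SERIALIZER = 'json'

    # Their new-style lowercase equivalents:
    app.conf.result_backend = 'redis://localhost:6379/0'
    app.conf.task_serializer = 'json'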
+
+We elected to extend the deprecation period until 6.0 since
+we did not loudly warn about using these deprecated settings.
+
+Please refer to the :ref:`migration guide ` for instructions.
+
+Step 3: Read the important notes in this document
+-------------------------------------------------
+
+Make sure you are not affected by any of the important upgrade notes
+mentioned in the :ref:`following section `.
+
+You should verify that none of the breaking changes in the CLI
+affect you. Please refer to :ref:`New Command Line Interface ` for details.
+
+Step 4: Migrate your code to Python 3
+-------------------------------------
+
+Celery 5.x only supports Python 3. Therefore, you must ensure your code is
+compatible with Python 3.
+
+If you haven't ported your code to Python 3, you must do so before upgrading.
+
+You can use tools like `2to3 `_
+and `pyupgrade `_ to assist you with
+this effort.
+
+After the migration is done, run your test suite with Celery 4 to ensure
+nothing has been broken.
+
+Step 5: Upgrade to Celery 5.3
+-----------------------------
+
+At this point you can upgrade your workers and clients with the new version.
+
+.. _v530-important:
+
+Important Notes
+===============
+
+Supported Python Versions
+-------------------------
+
+The supported Python versions are:
+
+- CPython 3.8
+- CPython 3.9
+- CPython 3.10
+- PyPy3.8 7.3.11 (``pypy3``)
+
+Experimental support
+~~~~~~~~~~~~~~~~~~~~
+
+Celery supports these Python versions provisionally as they are not production
+ready yet:
+
+- CPython 3.11
+
+Quality Improvements and Stability Enhancements
+-----------------------------------------------
+
+Celery 5.3 focuses on elevating the overall quality and stability of the project.
+We have dedicated significant efforts to address various bugs, enhance performance,
+and make improvements based on valuable user feedback.
+
+Better Compatibility and Upgrade Confidence
+-------------------------------------------
+
+Our goal with Celery 5.3 is to instill confidence in users who are currently
+using Celery 4 or older versions. We want to assure you that upgrading to
+Celery 5.3 will provide a more robust and reliable experience.
+
+
+Dropped support for Python 3.7
+------------------------------
+
+Celery now requires Python 3.8 and above.
+
+Python 3.7 will reach EOL in June, 2023.
+In order to focus our efforts we have dropped support for Python 3.7 in
+this version.
+
+If you still need to run Celery using Python 3.7,
+you can still use Celery 5.2.
+However, we encourage you to upgrade to a supported Python version since
+no further security patches will be applied for Python 3.7 after
+the 27th of June, 2023.
+
+
+Automatic re-connection on connection loss to broker
+----------------------------------------------------
+
+Unless :setting:`broker_connection_retry_on_startup` is set to False,
+Celery will automatically retry reconnecting to the broker after
+the first connection loss. :setting:`broker_connection_retry` controls
+whether to automatically retry reconnecting to the broker for subsequent
+reconnects.
+
+Since the message broker does not track how many tasks were already fetched
+before the connection was lost, Celery will reduce the prefetch count by
+the number of tasks that are currently running multiplied by
+:setting:`worker_prefetch_multiplier`.
+The prefetch count will be gradually restored to the maximum allowed after
+each time a task that was running before the connection was lost is complete.
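A minimal sketch of the settings involved, assuming a hypothetical ``app`` instance and a local broker (the values shown are illustrative):

.. code-block:: python

    from celery import Celery

    app = Celery('example', broker='amqp://localhost')

    # Retry the very first broker connection when the worker starts:
    app.conf.broker_connection_retry_on_startup = True

    # Keep retrying after connection losses later in the worker's lifetime:
    app.conf.broker_connection_retry = True

    # Multiplier used for the prefetch count; as described above, it is
    # also used to compute the temporary reduction after a reconnect:
    app.conf.worker_prefetch_multiplier = 4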
+
+
+Kombu
+-----
+
+Starting from v5.3.0, the minimum required version is Kombu 5.3.0.
+
+Redis
+-----
+
+redis-py 4.5.x is the new minimum required version.
+
+
+SQLAlchemy
+---------------------
+
+SQLAlchemy 1.4.x & 2.0.x are now supported in Celery v5.3.
+
+
+Billiard
+-------------------
+
+Minimum required version is now 4.1.0.
+
+
+Deprecate pytz and use zoneinfo
+-------------------------------
+
+A switch has been made to zoneinfo for handling timezone data instead of pytz.
+
+
+Support for out-of-tree worker pool implementations
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Prior to version 5.3, Celery had a fixed notion of the worker pool types it supports.
+Celery v5.3.0 introduces the possibility of an out-of-tree worker pool implementation.
+This feature ensures that the current worker pool implementations consistently call into
+BasePool._get_info(), and enhances it to report the worker pool class in use via the
+"celery inspect stats" command. For example:
+
+$ celery -A ... inspect stats
+-> celery@freenas: OK
+    {
+        ...
+        "pool": {
+            ...
+            "implementation": "celery_aio_pool.pool:AsyncIOPool",
+
+It can be used as follows:
+
+    Set the environment variable CELERY_CUSTOM_WORKER_POOL to the name of
+    an implementation of :class:`celery.concurrency.base.BasePool` in the
+    standard Celery format of "package:class".
+
+    Select this pool using '--pool custom'.
+
+
+Signal::``worker_before_create_process``
+----------------------------------------
+
+Dispatched in the parent process, just before new child process is created in the prefork pool.
+It can be used to clean up instances that don't behave well when forking.
+
+.. code-block:: python
+
+    @signals.worker_before_create_process.connect
+    def clean_channels(**kwargs):
+        grpc_singleton.clean_channel()
+
+
+Setting::``beat_cron_starting_deadline``
+----------------------------------------
+
+When using cron, the number of seconds :mod:`~celery.bin.beat` can look back
+when deciding whether a cron schedule is due. When set to `None`, cronjobs that
+are past due will always run immediately.
+
+
+Redis result backend Global keyprefix
+-------------------------------------
+
+The global key prefix will be prepended to all keys used for the result backend,
+which can be useful when a redis database is shared by different users.
+By default, no prefix is prepended.
+
+To configure the global keyprefix for the Redis result backend, use the
+``global_keyprefix`` key under :setting:`result_backend_transport_options`:
+
+.. code-block:: python
+
+    app.conf.result_backend_transport_options = {
+        'global_keyprefix': 'my_prefix_'
+    }
+
+
+Django
+------
+
+Minimum Django version is bumped to v2.2.28.
+Also added --skip-checks flag to bypass Django core checks.
+
+
+Make default worker state limits configurable
+---------------------------------------------
+
+Previously, `REVOKES_MAX`, `REVOKE_EXPIRES`, `SUCCESSFUL_MAX` and
+`SUCCESSFUL_EXPIRES` were hardcoded in `celery.worker.state`. This
+version introduces `CELERY_WORKER_` prefixed environment variables
+with the same names that allow you to customize these values should
+you need to.
+
+
+Canvas stamping
+---------------
+
+The goal of the Stamping API is to give an ability to label the signature
+and its components for debugging information purposes. For example, when
+the canvas is a complex structure, it may be necessary to label some or
+all elements of the formed structure. The complexity increases even more
+when nested groups are rolled-out or chain elements are replaced. In such
+cases, it may be necessary to understand which group an element is a part
+of or on what nested level it is. This requires a mechanism that traverses
+the canvas elements and marks them with specific metadata. The stamping API
+allows doing that based on the Visitor pattern.
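A minimal sketch of such a visitor, assuming Celery 5.3; the app, the broker URL and the ``add`` task are placeholders:

.. code-block:: python

    from celery import Celery
    from celery.canvas import StampingVisitor

    app = Celery('example', broker='memory://')

    @app.task
    def add(x, y):
        return x + y

    class MonitoringIdStampingVisitor(StampingVisitor):
        def on_signature(self, sig, **headers) -> dict:
            # Every signature the visitor traverses gets this stamp
            # merged into its headers.
            return {'monitoring_id': 'id_1'}

    # Stamp the signature and the canvas elements linked to it:
    sig = add.s(2, 2)
    sig.stamp(visitor=MonitoringIdStampingVisitor())

The stamps then travel with the task message, which is what allows revoking by stamped headers as in the ``revoke_by_stamped_headers`` change earlier in this series.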
+For example, when the canvas is a complex structure, it may be necessary
+to label some or all elements of the formed structure. The complexity
+increases even more when nested groups are rolled out or chain elements
+are replaced. In such cases, it may be necessary to understand which group
+an element is a part of, or on what nested level it is. This requires a
+mechanism that traverses the canvas elements and marks them with specific
+metadata. The Stamping API allows doing that based on the Visitor pattern.
+
+
+Known Issues
+------------
+
+Canvas header stamping has issues in a hybrid Celery 4.x & Celery 5.3.x
+environment and is not safe for production use at the moment.
+
+
+
+

From 1656bfc2c57bbae8a8dd944c376faabae76c6f22 Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Tue, 6 Jun 2023 11:38:03 +0600
Subject: [PATCH 0411/1051] Update README.rst with versions information

---
 README.rst | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/README.rst b/README.rst
index dc4ea26499c..383b7ec336c 100644
--- a/README.rst
+++ b/README.rst
@@ -58,13 +58,13 @@ in such a way that the client enqueues an URL to be requested by a worker.
 What do I need?
 ===============
 
-Celery version 5.3.0rc2 runs on,
+Celery version 5.3.0 runs on,
 
 - Python (3.8, 3.9, 3.10)
 - PyPy3.8+ (v7.3.11+)
 
 
-This is the version of celery which will support Python 3.7 or newer.
+This is the version of celery which will support Python 3.8 or newer.
 
 If you're running an older version of Python, you need to be running
 an older version of Celery:
@@ -77,7 +77,7 @@ an older version of Celery:
 - Python 2.4: Celery series 2.2 or earlier.
 
 Celery is a project with minimal funding,
-so we don't support Microsoft Windows.
+so we don't support Microsoft Windows, but it should still work.
 Please don't open any issues related to that platform.
 
 *Celery* is usually used with a message broker to send and receive messages.
@@ -92,7 +92,7 @@ Get Started
 ===========
 
 If this is the first time you're trying to use Celery, or you're
-new to Celery v5.3.0rc2 coming from previous versions then you should read our
+new to Celery v5.3.0 coming from previous versions then you should read our
 getting started tutorials:
 
 - `First steps with Celery`_
@@ -260,9 +260,9 @@ separating them by commas.
 
 ::
 
-    $ pip install "celery[amqp]"
+    $ pip install "celery[redis]"
 
-    $ pip install "celery[amqp,redis,auth,msgpack]"
+    $ pip install "celery[redis,auth,msgpack]"
 
 The following bundles are available:

From 563a94949cf876ab93641229c7d0611b0295d112 Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Tue, 6 Jun 2023 11:54:07 +0600
Subject: [PATCH 0412/1051] added changelog for v5.3.0

---
 Changelog.rst | 17 +++++++++++++++--
 1 file changed, 15 insertions(+), 2 deletions(-)

diff --git a/Changelog.rst b/Changelog.rst
index fdf249b84b1..c334d1249fd 100644
--- a/Changelog.rst
+++ b/Changelog.rst
@@ -5,8 +5,21 @@
 ================
 
 This document contains change notes for bugfix & new features
-in the main branch & 5.2.x series, please see :ref:`whatsnew-5.2` for
-an overview of what's new in Celery 5.2.
+in the main branch & 5.3.x series, please see :ref:`whatsnew-5.3` for
+an overview of what's new in Celery 5.3.
+
+
+.. _version-5.3.0:
+
+5.3.0
+=====
+
+:release-date: 2023-06-06 12:00 P.M GMT+6
+:release-by: Asif Saif Uddin
+
+- Test kombu 5.3.0 & minor doc update (#8294).
+- Update librabbitmq.txt > 2.0.0 (#8292).
+- Upgrade syntax to py3.8 (#8281).
 
 .. _version-5.3.0rc2:

From 50d4c0b07a8aa5f079b7e3fdc5e765b77ea391fa Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Tue, 6 Jun 2023 12:01:00 +0600
Subject: =?UTF-8?q?Bump=20version:=205.3.0rc2=20?=
 =?UTF-8?q?=E2=86=92=205.3.0?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg               | 2 +-
 README.rst                     | 2 +-
 celery/__init__.py             | 4 ++--
 docs/includes/introduction.txt | 2 +-
 4 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 6bf1243a0e8..83c9418ed35 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 5.3.0rc2
+current_version = 5.3.0
 commit = True
 tag = True
 parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P[a-z\d]+)?
diff --git a/README.rst b/README.rst
index 383b7ec336c..913714c8584 100644
--- a/README.rst
+++ b/README.rst
@@ -2,7 +2,7 @@
 |build-status| |coverage| |license| |wheel| |semgrep| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge|
 
-:Version: 5.3.0rc2 (dawn-chorus)
+:Version: 5.3.0 (emerald-rush)
 :Web: https://docs.celeryq.dev/en/stable/index.html
 :Download: https://pypi.org/project/celery/
 :Source: https://github.com/celery/celery/
diff --git a/celery/__init__.py b/celery/__init__.py
index ed47561e262..52ec6194e78 100644
--- a/celery/__init__.py
+++ b/celery/__init__.py
@@ -15,9 +15,9 @@
 # Lazy loading
 from . import local
 
-SERIES = 'dawn-chorus'
+SERIES = 'emerald-rush'
 
-__version__ = '5.3.0rc2'
+__version__ = '5.3.0'
 __author__ = 'Ask Solem'
 __contact__ = 'auvipy@gmail.com'
 __homepage__ = 'https://docs.celeryq.dev/'
diff --git a/docs/includes/introduction.txt b/docs/includes/introduction.txt
index 13927847965..f57870a3c5c 100644
--- a/docs/includes/introduction.txt
+++ b/docs/includes/introduction.txt
@@ -1,4 +1,4 @@
-:Version: 5.3.0rc2 (dawn-chorus)
+:Version: 5.3.0 (emerald-rush)
 :Web: https://docs.celeryq.dev/en/stable/index.html
 :Download: https://pypi.org/project/celery/
 :Source: https://github.com/celery/celery/

From ad3916f64d7c576ba340d28af7618337676bd497 Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Wed, 7 Jun 2023 16:41:31 +0600
Subject: [PATCH 0414/1051] try latest pycurl release (#7069)

* try pycurl latest pypy release

* explicitly define sqs dependencies

* Update sqs.txt

* Update requirements/test-ci-default.txt

* Update requirements/test-ci-default.txt

* Update requirements/extras/sqs.txt

* Update requirements/test-ci-default.txt

* Update requirements/extras/sqs.txt

---
 requirements/extras/sqs.txt      | 3 +++
 requirements/test-ci-default.txt | 3 ++-
 2 files changed, 5 insertions(+), 1 deletion(-)

diff --git a/requirements/extras/sqs.txt b/requirements/extras/sqs.txt
index 173f2cc8d7a..7aa763de377 100644
--- a/requirements/extras/sqs.txt
+++ b/requirements/extras/sqs.txt
@@ -1 +1,4 @@
+boto3>=1.26.143
+pycurl>=7.43.0.5; sys_platform != 'win32' and platform_python_implementation=="CPython"
+urllib3>=1.26.16
 kombu[sqs]>=5.3.0
diff --git a/requirements/test-ci-default.txt b/requirements/test-ci-default.txt
index 93141b96175..5493cae1c99 100644
--- a/requirements/test-ci-default.txt
+++ b/requirements/test-ci-default.txt
@@ -21,4 +21,5 @@ git+https://github.com/celery/kombu.git
 
 # SQS dependencies other than boto
-pycurl==7.43.0.5
+pycurl>=7.43.0.5; sys_platform != 'win32' and platform_python_implementation=="CPython"
+

From 44a00605c306b2d5f9ae84cf426b97c1ce48c091 Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Wed, 7 Jun 2023 17:27:03 +0600
Subject: [PATCH 0415/1051] Update librabbitmq>=2.0.0;
python_version < '3.11' (#8302) --- requirements/extras/librabbitmq.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/librabbitmq.txt b/requirements/extras/librabbitmq.txt index 874e223c7a7..e9784a52c9e 100644 --- a/requirements/extras/librabbitmq.txt +++ b/requirements/extras/librabbitmq.txt @@ -1 +1 @@ -librabbitmq>=2.0.0 +librabbitmq>=2.0.0; python_version < '3.11' From e540a8d59b69ecd8ddce004f2a92f540e8cf540c Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Thu, 8 Jun 2023 15:57:22 +0600 Subject: [PATCH 0416/1051] added initial support for python 3.11 (#8304) --- .github/workflows/python-package.yml | 7 +++---- README.rst | 2 +- setup.py | 3 ++- tox.ini | 10 ++++++---- 4 files changed, 12 insertions(+), 10 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index a8b9f963d37..e88812521b3 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -29,13 +29,12 @@ jobs: strategy: fail-fast: false matrix: - python-version: ['3.8', '3.9', '3.10', 'pypy-3.9', 'pypy-3.8'] + python-version: ['3.8', '3.9', '3.10', '3.11', 'pypy-3.9'] os: ["ubuntu-latest", "windows-latest"] exclude: - python-version: 'pypy-3.9' os: "windows-latest" - - python-version: 'pypy-3.8' - os: "windows-latest" + steps: - name: Install apt packages if: startsWith(matrix.os, 'ubuntu-') @@ -74,7 +73,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ['3.8', '3.9', '3.10'] + python-version: ['3.8', '3.9', '3.10', '3.11'] toxenv: ['redis', 'rabbitmq', 'rabbitmq_redis'] services: diff --git a/README.rst b/README.rst index 913714c8584..8c8852936a5 100644 --- a/README.rst +++ b/README.rst @@ -60,7 +60,7 @@ What do I need? Celery version 5.3.0 runs on, -- Python (3.8, 3.9, 3.10) +- Python (3.8, 3.9, 3.10, 3.11) - PyPy3.8+ (v7.3.11+) diff --git a/setup.py b/setup.py index 60edefe434b..6b0f0110bd8 100755 --- a/setup.py +++ b/setup.py @@ -146,7 +146,7 @@ def long_description(): license='BSD-3-Clause', platforms=['any'], install_requires=install_requires(), - python_requires=">=3.7", + python_requires=">=3.8", tests_require=reqs('test.txt'), extras_require=extras_require(), entry_points={ @@ -172,6 +172,7 @@ def long_description(): "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Operating System :: OS Independent" diff --git a/tox.ini b/tox.ini index 207770dda2c..59d3676d1e3 100644 --- a/tox.ini +++ b/tox.ini @@ -2,8 +2,8 @@ requires = tox-gh-actions envlist = - {3.8,3.9,3.10,pypy3}-unit - {3.8,3.9,3.10,pypy3}-integration-{rabbitmq_redis,rabbitmq,redis,dynamodb,azureblockblob,cache,cassandra,elasticsearch} + {3.8,3.9,3.10,3.11,pypy3}-unit + {3.8,3.9,3.10,3.11,pypy3}-integration-{rabbitmq_redis,rabbitmq,redis,dynamodb,azureblockblob,cache,cassandra,elasticsearch} flake8 apicheck @@ -16,6 +16,7 @@ python = 3.8: 3.8-unit 3.9: 3.9-unit 3.10: 3.10-unit + 3.11: 3.11-unit pypy-3: pypy3-unit [testenv] @@ -28,8 +29,8 @@ deps= -r{toxinidir}/requirements/test.txt -r{toxinidir}/requirements/pkgutils.txt - 3.8,3.9,3.10: -r{toxinidir}/requirements/test-ci-default.txt - 3.8,3.9,3.10: -r{toxinidir}/requirements/docs.txt + 3.8,3.9,3.10,3.11: -r{toxinidir}/requirements/test-ci-default.txt + 3.8,3.9,3.10,3.11: -r{toxinidir}/requirements/docs.txt pypy3: 
-r{toxinidir}/requirements/test-ci-default.txt integration: -r{toxinidir}/requirements/test-integration.txt @@ -77,6 +78,7 @@ basepython = 3.8: python3.8 3.9: python3.9 3.10: python3.10 + 3.11: python3.11 pypy3: pypy3 mypy: python3.8 lint,apicheck,linkcheck,configcheck,bandit: python3.9 From c3063fc4cbb1c60cc451dda1843167ead0441d54 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Viktor=20K=C3=A1lm=C3=A1n?= Date: Mon, 12 Jun 2023 15:36:57 +0200 Subject: [PATCH 0417/1051] fix supported versions in docs --- docs/django/first-steps-with-django.rst | 4 ++-- docs/getting-started/introduction.rst | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/django/first-steps-with-django.rst b/docs/django/first-steps-with-django.rst index a58fbdbea6d..35914e8098b 100644 --- a/docs/django/first-steps-with-django.rst +++ b/docs/django/first-steps-with-django.rst @@ -19,8 +19,8 @@ Using Celery with Django .. note:: - Celery 5.0.x supports Django 1.11 LTS or newer versions. Please use Celery 4.4.x - for versions older than Django 1.11. + Celery 5.3.x supports Django 2.2 LTS or newer versions. + Please use Celery 5.2.x for versions older than Django 2.2 or Celery 4.4.x if your Django version is older than 1.11. To use Celery with your Django project you must first define an instance of the Celery library (called an "app") diff --git a/docs/getting-started/introduction.rst b/docs/getting-started/introduction.rst index 2797ce60097..18c672eb71a 100644 --- a/docs/getting-started/introduction.rst +++ b/docs/getting-started/introduction.rst @@ -39,10 +39,10 @@ What do I need? =============== .. sidebar:: Version Requirements - :subtitle: Celery version 5.2 runs on + :subtitle: Celery version 5.3 runs on - - Python ❨3.7, 3.8, 3.9, 3.10❩ - - PyPy3.7, 3.8 ❨7.3.7❩ + - Python ❨3.8, 3.9, 3.10, 3.11❩ + - PyPy3.8+ ❨v7.3.11+❩ Celery 4.x was the last version to support Python 2.7, Celery 5.x requires Python 3.6 or newer. From 25d6b50a84229598a2ecc3f865b9bbdabc8346b9 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 12 Jun 2023 17:34:40 +0000 Subject: [PATCH 0418/1051] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/asottile/pyupgrade: v3.4.0 → v3.6.0](https://github.com/asottile/pyupgrade/compare/v3.4.0...v3.6.0) - [github.com/asottile/yesqa: v1.4.0 → v1.5.0](https://github.com/asottile/yesqa/compare/v1.4.0...v1.5.0) --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 58aea37df77..e2ac75c83ed 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/asottile/pyupgrade - rev: v3.4.0 + rev: v3.6.0 hooks: - id: pyupgrade args: ["--py38-plus"] @@ -11,7 +11,7 @@ repos: - id: flake8 - repo: https://github.com/asottile/yesqa - rev: v1.4.0 + rev: v1.5.0 hooks: - id: yesqa From 58c851eb85f01b979447016cad75b70774b57644 Mon Sep 17 00:00:00 2001 From: Shahar Lev Date: Wed, 14 Jun 2023 16:42:42 +0300 Subject: [PATCH 0419/1051] ChainMap observers fix (#8305) * ChainMap observers fix Observers should not be shared across different instances. Aside from unwanted behavior, this can lead to object leaks (like celery app objects not being garbage collected). 
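
A minimal sketch of the pitfall (illustrative only; the `Config` class and
its names are made up here, not part of the original change):

    class Config:
        observers = []  # class attribute: one list shared by *every* instance

        def bind_to(self, callback):
            self.observers.append(callback)  # mutates the shared class-level list

    a, b = Config(), Config()
    a.bind_to(print)
    assert b.observers == [print]  # b "sees" the observer bound to a

The fix is to give each instance its own list in __init__, which is what
this commit does for ChainMap._observers.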
* [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- celery/utils/collections.py | 3 ++- t/unit/utils/test_collections.py | 18 ++++++++++++++++-- 2 files changed, 18 insertions(+), 3 deletions(-) diff --git a/celery/utils/collections.py b/celery/utils/collections.py index d03e0169a83..6fb559acecf 100644 --- a/celery/utils/collections.py +++ b/celery/utils/collections.py @@ -206,7 +206,7 @@ class ChainMap(MutableMapping): changes = None defaults = None maps = None - _observers = [] + _observers = () def __init__(self, *maps, **kwargs): # type: (*Mapping, **Any) -> None @@ -216,6 +216,7 @@ def __init__(self, *maps, **kwargs): maps=maps, changes=maps[0], defaults=maps[1:], + _observers=[], ) def add_defaults(self, d): diff --git a/t/unit/utils/test_collections.py b/t/unit/utils/test_collections.py index 79ccc011741..2f183899017 100644 --- a/t/unit/utils/test_collections.py +++ b/t/unit/utils/test_collections.py @@ -2,13 +2,14 @@ from collections.abc import Mapping from itertools import count from time import monotonic +from unittest.mock import Mock import pytest from billiard.einfo import ExceptionInfo import t.skip -from celery.utils.collections import (AttributeDict, BufferMap, ConfigurationView, DictAttribute, LimitedSet, - Messagebuffer) +from celery.utils.collections import (AttributeDict, BufferMap, ChainMap, ConfigurationView, DictAttribute, + LimitedSet, Messagebuffer) from celery.utils.objects import Bunch @@ -448,3 +449,16 @@ def test_pop_empty_no_default(self): def test_repr(self): assert repr(Messagebuffer(10, [1, 2, 3])) + + +class test_ChainMap: + + def test_observers_not_shared(self): + a = ChainMap() + b = ChainMap() + callback = Mock() + a.bind_to(callback) + b.update(x=1) + callback.assert_not_called() + a.update(x=1) + callback.assert_called_once_with(x=1) From 3f965ebb9321c982f229429eb002ec23f114aa7d Mon Sep 17 00:00:00 2001 From: "stuart.bradley" Date: Tue, 13 Jun 2023 08:51:28 +0100 Subject: [PATCH 0420/1051] Revert optimization flag behaviour back to 4.* --- celery/bin/worker.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/celery/bin/worker.py b/celery/bin/worker.py index 9dd1582030e..0cc3d6664cc 100644 --- a/celery/bin/worker.py +++ b/celery/bin/worker.py @@ -166,8 +166,8 @@ def detach(path, argv, logfile=None, pidfile=None, uid=None, type=LOG_LEVEL, help_group="Worker Options", help="Logging level.") -@click.option('optimization', - '-O', +@click.option('-O', + '--optimization', default='default', cls=CeleryOption, type=click.Choice(('default', 'fair')), From 1ef9e5111b5c4bcb2235f2fed52fd9d25d67fc52 Mon Sep 17 00:00:00 2001 From: "pyup.io bot" Date: Thu, 15 Jun 2023 07:52:02 -0500 Subject: [PATCH 0421/1051] Scheduled weekly dependency update for week 24 (#8309) * Pin sphinx to latest version 7.0.1 * Update pytest from 7.3.1 to 7.3.2 * Update pre-commit from 2.21.0 to 3.3.2 * Pin elasticsearch to latest version 8.8.0 * Update requirements/extras/elasticsearch.txt * Update requirements/docs.txt * Update requirements/docs.txt * Update requirements/docs.txt --------- Co-authored-by: Asif Saif Uddin --- requirements/docs.txt | 4 ++-- requirements/test.txt | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/docs.txt b/requirements/docs.txt index dc9fc872228..fac534b02cf 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -1,5 +1,5 @@ 
-sphinx_celery~=2.0.0 -Sphinx>=3.0.0,<6.0.0 +sphinx_celery>=2.0.0 +Sphinx==5.3.0 sphinx-testing~=1.0.1 sphinx-click==4.4.0 -r extras/sqlalchemy.txt diff --git a/requirements/test.txt b/requirements/test.txt index f7fa249f3c0..1ad633ce95b 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,4 +1,4 @@ -pytest==7.3.1 +pytest==7.3.2 pytest-celery==0.0.0 pytest-subtests==0.11.0 pytest-timeout==2.1.0 @@ -8,7 +8,7 @@ boto3>=1.26.143 moto>=4.1.11 # typing extensions mypy==1.3.0; platform_python_implementation=="CPython" -pre-commit==2.21.0 +pre-commit==3.3.2 -r extras/yaml.txt -r extras/msgpack.txt -r extras/mongodb.txt From e2a02effdbd2c35db09b766df6c20daf7f293f55 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Thu, 15 Jun 2023 23:37:25 +0600 Subject: [PATCH 0422/1051] restrict redis 4.5.5 as it has severe bugs (#8317) * try to restrict redis 4.5.5 * Update redis.txt --- requirements/extras/redis.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/redis.txt b/requirements/extras/redis.txt index 16c0c206a11..5bb61f15f33 100644 --- a/requirements/extras/redis.txt +++ b/requirements/extras/redis.txt @@ -1 +1 @@ -redis>=4.5.2 +redis>=4.5.2,!=4.5.5 From f17e630f5ee579b07c8c1bffa38f666ac315bd61 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Sat, 17 Jun 2023 12:31:01 +0600 Subject: [PATCH 0423/1051] Update pypy version & CI (#8320) * Update pypy version & CI * Update .github/workflows/python-package.yml --- .github/workflows/python-package.yml | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index e88812521b3..00ee177e685 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -29,17 +29,19 @@ jobs: strategy: fail-fast: false matrix: - python-version: ['3.8', '3.9', '3.10', '3.11', 'pypy-3.9'] + python-version: ['3.8', '3.9', '3.10', '3.11', 'pypy-3.9', 'pypy-3.10'] os: ["ubuntu-latest", "windows-latest"] exclude: - python-version: 'pypy-3.9' os: "windows-latest" + - python-version: 'pypy-3.10' + os: "windows-latest" steps: - name: Install apt packages if: startsWith(matrix.os, 'ubuntu-') run: | - sudo apt update && sudo apt-get install -f libcurl4-openssl-dev libssl-dev libgnutls28-dev httping expect libmemcached-dev + sudo apt-get update && sudo apt-get install -f libcurl4-openssl-dev libssl-dev libgnutls28-dev httping expect libmemcached-dev - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v4 @@ -57,7 +59,7 @@ jobs: run: | tox --verbose --verbose - - uses: codecov/codecov-action@v3.1.0 + - uses: codecov/codecov-action@v3 with: flags: unittests # optional fail_ci_if_error: true # optional (default = false) @@ -95,7 +97,7 @@ jobs: steps: - name: Install apt packages run: | - sudo apt update && sudo apt-get install -f libcurl4-openssl-dev libssl-dev libgnutls28-dev httping expect libmemcached-dev + sudo apt-get update && sudo apt-get install -f libcurl4-openssl-dev libssl-dev libgnutls28-dev httping expect libmemcached-dev - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} From fc1c38a7ed376e21b97b6ac57cbc7ac7050efebc Mon Sep 17 00:00:00 2001 From: Ipakeev <23178128+ipakeev@users.noreply.github.com> Date: Sun, 18 Jun 2023 06:06:56 +0300 Subject: [PATCH 0424/1051] fixed a small float value of retry_backoff (#8295) * fixed a small float value of retry_backoff * added tests * added tests with retry_jitter * fixed 
imports * fixed coverage --- celery/app/autoretry.py | 4 +- t/unit/tasks/test_tasks.py | 116 ++++++++++++++++++++++++------------- 2 files changed, 78 insertions(+), 42 deletions(-) diff --git a/celery/app/autoretry.py b/celery/app/autoretry.py index 15747e5173f..80bd81f53bf 100644 --- a/celery/app/autoretry.py +++ b/celery/app/autoretry.py @@ -18,7 +18,7 @@ def add_autoretry_behaviour(task, **options): retry_kwargs = options.get( 'retry_kwargs', getattr(task, 'retry_kwargs', {}) ) - retry_backoff = int( + retry_backoff = float( options.get('retry_backoff', getattr(task, 'retry_backoff', False)) ) @@ -48,7 +48,7 @@ def run(*args, **kwargs): if retry_backoff: retry_kwargs['countdown'] = \ get_exponential_backoff_interval( - factor=retry_backoff, + factor=int(max(1.0, retry_backoff)), retries=task.request.retries, maximum=retry_backoff_max, full_jitter=retry_jitter) diff --git a/t/unit/tasks/test_tasks.py b/t/unit/tasks/test_tasks.py index c90d9cdd0f0..36bb792b16d 100644 --- a/t/unit/tasks/test_tasks.py +++ b/t/unit/tasks/test_tasks.py @@ -248,28 +248,6 @@ def autoretry_arith_task(self, a, b): self.autoretry_arith_task = autoretry_arith_task - @self.app.task(bind=True, autoretry_for=(HTTPError,), - retry_backoff=True, shared=False) - def autoretry_backoff_task(self, url): - self.iterations += 1 - if "error" in url: - fp = tempfile.TemporaryFile() - raise HTTPError(url, '500', 'Error', '', fp) - return url - - self.autoretry_backoff_task = autoretry_backoff_task - - @self.app.task(bind=True, autoretry_for=(HTTPError,), - retry_backoff=True, retry_jitter=True, shared=False) - def autoretry_backoff_jitter_task(self, url): - self.iterations += 1 - if "error" in url: - fp = tempfile.TemporaryFile() - raise HTTPError(url, '500', 'Error', '', fp) - return url - - self.autoretry_backoff_jitter_task = autoretry_backoff_jitter_task - @self.app.task(bind=True, base=TaskWithRetry, shared=False) def autoretry_for_from_base_task(self, a, b): self.iterations += 1 @@ -616,25 +594,62 @@ def test_autoretry_arith(self): self.autoretry_arith_task.apply((1, 0)) assert self.autoretry_arith_task.iterations == 1 - @patch('random.randrange', side_effect=lambda i: i - 1) - def test_autoretry_backoff(self, randrange): - task = self.autoretry_backoff_task - task.max_retries = 3 + @pytest.mark.parametrize( + 'retry_backoff, expected_countdowns', + [ + (False, [None, None, None, None]), + (0, [None, None, None, None]), + (0.0, [None, None, None, None]), + (True, [1, 2, 4, 8]), + (-1, [1, 2, 4, 8]), + (0.1, [1, 2, 4, 8]), + (1, [1, 2, 4, 8]), + (1.9, [1, 2, 4, 8]), + (2, [2, 4, 8, 16]), + ], + ) + def test_autoretry_backoff(self, retry_backoff, expected_countdowns): + @self.app.task(bind=True, shared=False, autoretry_for=(ZeroDivisionError,), + retry_backoff=retry_backoff, retry_jitter=False, max_retries=3) + def task(self_, x, y): + self_.iterations += 1 + return x / y + task.iterations = 0 with patch.object(task, 'retry', wraps=task.retry) as fake_retry: - task.apply(("http://httpbin.org/error",)) + task.apply((1, 0)) assert task.iterations == 4 retry_call_countdowns = [ - call_[1]['countdown'] for call_ in fake_retry.call_args_list + call_[1].get('countdown') for call_ in fake_retry.call_args_list ] - assert retry_call_countdowns == [1, 2, 4, 8] - + assert retry_call_countdowns == expected_countdowns + + @pytest.mark.parametrize( + 'retry_backoff, expected_countdowns', + [ + (False, [None, None, None, None]), + (0, [None, None, None, None]), + (0.0, [None, None, None, None]), + (True, [0, 1, 3, 7]), + (-1, [0, 1, 3, 
7]), + (0.1, [0, 1, 3, 7]), + (1, [0, 1, 3, 7]), + (1.9, [0, 1, 3, 7]), + (2, [1, 3, 7, 15]), + ], + ) @patch('random.randrange', side_effect=lambda i: i - 2) - def test_autoretry_backoff_jitter(self, randrange): - task = self.autoretry_backoff_jitter_task - task.max_retries = 3 + def test_autoretry_backoff_jitter(self, randrange, retry_backoff, expected_countdowns): + @self.app.task(bind=True, shared=False, autoretry_for=(HTTPError,), + retry_backoff=retry_backoff, retry_jitter=True, max_retries=3) + def task(self_, url): + self_.iterations += 1 + if "error" in url: + fp = tempfile.TemporaryFile() + raise HTTPError(url, '500', 'Error', '', fp) + task.iterations = 0 with patch.object(task, 'retry', wraps=task.retry) as fake_retry: @@ -642,9 +657,9 @@ def test_autoretry_backoff_jitter(self, randrange): assert task.iterations == 4 retry_call_countdowns = [ - call_[1]['countdown'] for call_ in fake_retry.call_args_list + call_[1].get('countdown') for call_ in fake_retry.call_args_list ] - assert retry_call_countdowns == [0, 1, 3, 7] + assert retry_call_countdowns == expected_countdowns def test_autoretry_for_from_base(self): self.autoretry_for_from_base_task.iterations = 0 @@ -744,12 +759,26 @@ def test_retry_wrong_eta_when_not_enable_utc(self): self.autoretry_task.apply((1, 0)) assert self.autoretry_task.iterations == 6 - def test_autoretry_class_based_task(self): + @pytest.mark.parametrize( + 'backoff_value, expected_countdowns', + [ + (False, [None, None, None]), + (0, [None, None, None]), + (0.0, [None, None, None]), + (True, [1, 2, 4]), + (-1, [1, 2, 4]), + (0.1, [1, 2, 4]), + (1, [1, 2, 4]), + (1.9, [1, 2, 4]), + (2, [2, 4, 8]), + ], + ) + def test_autoretry_class_based_task(self, backoff_value, expected_countdowns): class ClassBasedAutoRetryTask(Task): name = 'ClassBasedAutoRetryTask' autoretry_for = (ZeroDivisionError,) - retry_kwargs = {'max_retries': 5} - retry_backoff = True + retry_kwargs = {'max_retries': 2} + retry_backoff = backoff_value retry_backoff_max = 700 retry_jitter = False iterations = 0 @@ -762,8 +791,15 @@ def run(self, x, y): task = ClassBasedAutoRetryTask() self.app.tasks.register(task) task.iterations = 0 - task.apply([1, 0]) - assert task.iterations == 6 + + with patch.object(task, 'retry', wraps=task.retry) as fake_retry: + task.apply((1, 0)) + + assert task.iterations == 3 + retry_call_countdowns = [ + call_[1].get('countdown') for call_ in fake_retry.call_args_list + ] + assert retry_call_countdowns == expected_countdowns class test_canvas_utils(TasksCase): From a2ba805a16ebf23391b8b9c0da325a0e3550a9e5 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Sun, 18 Jun 2023 19:04:43 +0600 Subject: [PATCH 0425/1051] test new version of kombu (#8323) --- requirements/default.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/default.txt b/requirements/default.txt index c51039d0c73..af4f8b35bca 100644 --- a/requirements/default.txt +++ b/requirements/default.txt @@ -1,5 +1,5 @@ billiard>=4.1.0,<5.0 -kombu>=5.3.0,<6.0 +kombu>=5.3.1,<6.0 vine>=5.0.0,<6.0 click>=8.1.2,<9.0 click-didyoumean>=0.3.0 From 9e5f46cee280e04d363ec899c375ec312f1fb92b Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Sun, 18 Jun 2023 19:45:17 +0600 Subject: [PATCH 0426/1051] limit pyro4 up to python 3.10 only as it is (#8324) --- requirements/extras/pyro.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/pyro.txt b/requirements/extras/pyro.txt index bde9e2995b9..c52c0b19b02 100644 --- a/requirements/extras/pyro.txt +++ 
b/requirements/extras/pyro.txt @@ -1 +1 @@ -pyro4==4.82 +pyro4==4.82; python_version < '3.11' From b4f9bf090c695d8122956994f7c484bd0a65d56f Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Sun, 18 Jun 2023 19:52:28 +0600 Subject: [PATCH 0427/1051] Bump kombu version in bdist_rpm --- setup.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index fffebc3afb3..a1fc752e35a 100644 --- a/setup.cfg +++ b/setup.cfg @@ -35,7 +35,7 @@ per-file-ignores = requires = backports.zoneinfo>=0.2.1;python_version<'3.9' tzdata>=2022.7 billiard >=4.1.0,<5.0 - kombu >= 5.3.0,<6.0.0 + kombu >= 5.3.1,<6.0.0 [bdist_wheel] universal = 0 From 458d06cb48e174ff439b55ace0104155c9fea877 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Sun, 18 Jun 2023 19:54:43 +0600 Subject: [PATCH 0428/1051] update security supported versions --- SECURITY.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/SECURITY.md b/SECURITY.md index 45213f838de..61902e2c492 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -7,9 +7,9 @@ currently being supported with security updates. | Version | Supported | | ------- | ------------------ | -| 5.2.x | :white_check_mark: | -| 5.0.x | :x: | -| 5.1.x | :white_check_mark: | +| 5.3.x | :white_check_mark: | +| 5.2.x | :x: | +| 5.1.x | :x: | | < 5.0 | :x: | ## Reporting a Vulnerability From 8ef7e0895c4e8f54bd79911ca1478fef19a4e6ea Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Sun, 18 Jun 2023 19:59:27 +0600 Subject: [PATCH 0429/1051] update copyright --- LICENSE | 5 +++-- celery/__init__.py | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/LICENSE b/LICENSE index c0fdb269466..93411068ab7 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,7 @@ -Copyright (c) 2015-2016 Ask Solem & contributors. All rights reserved. +Copyright (c) 2017-2026 Asif Saif Uddin, core team & contributors. All rights reserved. +Copyright (c) 2015-2016 Ask Solem & contributors. All rights reserved. Copyright (c) 2012-2014 GoPivotal, Inc. All rights reserved. -Copyright (c) 2009, 2010, 2011, 2012 Ask Solem, and individual contributors. All rights reserved. +Copyright (c) 2009, 2010, 2011, 2012 Ask Solem, and individual contributors. All rights reserved. Celery is licensed under The BSD License (3 Clause, also known as the new BSD license). The license is an OSI approved Open Source diff --git a/celery/__init__.py b/celery/__init__.py index 52ec6194e78..aa1043dd2b4 100644 --- a/celery/__init__.py +++ b/celery/__init__.py @@ -1,5 +1,5 @@ """Distributed Task Queue.""" -# :copyright: (c) 2016-2026 Asif Saif Uddin, celery core and individual +# :copyright: (c) 2017-2026 Asif Saif Uddin, celery core and individual # contributors, All rights reserved. # :copyright: (c) 2015-2016 Ask Solem. All rights reserved. # :copyright: (c) 2012-2014 GoPivotal, Inc., All rights reserved. From 45d2aa001e93bcdbd4162f6aa379b4002ab34a15 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Sun, 18 Jun 2023 20:10:29 +0600 Subject: [PATCH 0430/1051] added changelog for v5.3.1 --- Changelog.rst | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/Changelog.rst b/Changelog.rst index c334d1249fd..b849ee19e87 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -9,6 +9,26 @@ in the main branch & 5.3.x series, please see :ref:`whatsnew-5.3` for an overview of what's new in Celery 5.3. +.. _version-5.3.1: + +5.3.1 +===== + +:release-date: 2023-06-18 8:15 P.M GMT+6 +:release-by: Asif Saif Uddin + +- Upgrade to latest pycurl release (#7069). 
+- Limit librabbitmq>=2.0.0; python_version < '3.11' (#8302). +- Added initial support for python 3.11 (#8304). +- ChainMap observers fix (#8305). +- Revert optimization CLI flag behaviour back to original. +- Restrict redis 4.5.5 as it has severe bugs (#8317). +- Tested pypy 3.10 version in CI (#8320). +- Bump new version of kombu to 5.3.1 (#8323). +- Limit pyro4 up to python 3.10 only as it is (#8324). + + + .. _version-5.3.0: 5.3.0 From 7073e3b77d92ba9426cd0fc12d2f6c4d4254ac65 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Sun, 18 Jun 2023 20:11:23 +0600 Subject: [PATCH 0431/1051] added changelog for v5.3.1 --- Changelog.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/Changelog.rst b/Changelog.rst index b849ee19e87..cea8615026c 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -25,6 +25,7 @@ an overview of what's new in Celery 5.3. - Restrict redis 4.5.5 as it has severe bugs (#8317). - Tested pypy 3.10 version in CI (#8320). - Bump new version of kombu to 5.3.1 (#8323). +- Fixed a small float value of retry_backoff (#8295). - Limit pyro4 up to python 3.10 only as it is (#8324). From 11b854aa32edd65982be1210d41a25bcddab8642 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Sun, 18 Jun 2023 20:17:03 +0600 Subject: [PATCH 0432/1051] =?UTF-8?q?Bump=20version:=205.3.0=20=E2=86=92?= =?UTF-8?q?=205.3.1?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.cfg | 2 +- README.rst | 6 +++--- celery/__init__.py | 2 +- docs/includes/introduction.txt | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 83c9418ed35..b44605ec017 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 5.3.0 +current_version = 5.3.1 commit = True tag = True parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P[a-z\d]+)? diff --git a/README.rst b/README.rst index 8c8852936a5..71d07309f1a 100644 --- a/README.rst +++ b/README.rst @@ -2,7 +2,7 @@ |build-status| |coverage| |license| |wheel| |semgrep| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge| -:Version: 5.3.0 (emerald-rush) +:Version: 5.3.1 (emerald-rush) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ @@ -58,7 +58,7 @@ in such a way that the client enqueues an URL to be requested by a worker. What do I need? 
=============== -Celery version 5.3.0 runs on, +Celery version 5.3.1 runs on, - Python (3.8, 3.9, 3.10, 3.11) - PyPy3.8+ (v7.3.11+) @@ -92,7 +92,7 @@ Get Started =========== If this is the first time you're trying to use Celery, or you're -new to Celery v5.3.0 coming from previous versions then you should read our +new to Celery v5.3.1 coming from previous versions then you should read our getting started tutorials: - `First steps with Celery`_ diff --git a/celery/__init__.py b/celery/__init__.py index aa1043dd2b4..1bff85d8bdf 100644 --- a/celery/__init__.py +++ b/celery/__init__.py @@ -17,7 +17,7 @@ SERIES = 'emerald-rush' -__version__ = '5.3.0' +__version__ = '5.3.1' __author__ = 'Ask Solem' __contact__ = 'auvipy@gmail.com' __homepage__ = 'https://docs.celeryq.dev/' diff --git a/docs/includes/introduction.txt b/docs/includes/introduction.txt index f57870a3c5c..509854d4595 100644 --- a/docs/includes/introduction.txt +++ b/docs/includes/introduction.txt @@ -1,4 +1,4 @@ -:Version: 5.3.0 (emerald-rush) +:Version: 5.3.1 (emerald-rush) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ From 447caaebdb44542e5b78a1cc55f9a319006143a5 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 19 Jun 2023 22:37:52 +0300 Subject: [PATCH 0433/1051] [pre-commit.ci] pre-commit autoupdate (#8328) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/asottile/pyupgrade: v3.6.0 → v3.7.0](https://github.com/asottile/pyupgrade/compare/v3.6.0...v3.7.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index e2ac75c83ed..f78ff93978a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/asottile/pyupgrade - rev: v3.6.0 + rev: v3.7.0 hooks: - id: pyupgrade args: ["--py38-plus"] From 424b71f37acbbf6b29c9717aa34cf5259f1f8803 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sat, 24 Jun 2023 12:57:07 +0300 Subject: [PATCH 0434/1051] Bugfix: Removed unecessary stamping code from _chord.run() --- celery/canvas.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/celery/canvas.py b/celery/canvas.py index 06fef05d253..c0601b2454b 100644 --- a/celery/canvas.py +++ b/celery/canvas.py @@ -2216,9 +2216,6 @@ def run(self, header, body, partial_args, app=None, interval=None, options = dict(self.options, **options) if options else self.options if options: options.pop('task_id', None) - stamped_headers = set(body.options.get("stamped_headers", [])) - stamped_headers.update(options.get("stamped_headers", [])) - options["stamped_headers"] = list(stamped_headers) body.options.update(options) bodyres = body.freeze(task_id, root_id=root_id) From ab03fd2154687fbbbbb38b89c1a3b5a55881078c Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 26 Jun 2023 13:12:00 +0300 Subject: [PATCH 0435/1051] User guide fix (hotfix for #1755) (#8342) --- docs/userguide/calling.rst | 6 ------ 1 file changed, 6 deletions(-) diff --git a/docs/userguide/calling.rst b/docs/userguide/calling.rst index 6bffd47fbf6..ed562faa4e5 100644 --- a/docs/userguide/calling.rst +++ b/docs/userguide/calling.rst @@ -379,12 +379,6 @@ and can contain the following keys: `retry_errors` is a tuple of exception classes that 
should be retried. It will be ignored if not specified. Default is None (ignored). - .. warning:: - - If you specify a tuple of exception classes, you must make sure - that you also specify the ``max_retries`` option, otherwise - you will get an error. - For example, if you want to retry only tasks that were timed out, you can use :exc:`~kombu.exceptions.TimeoutError`: From e6b8db5f8417712af7db964dc02d504bec30c60c Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 27 Jun 2023 05:02:56 +0300 Subject: [PATCH 0436/1051] [pre-commit.ci] pre-commit autoupdate (#8345) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/mirrors-mypy: v1.3.0 → v1.4.1](https://github.com/pre-commit/mirrors-mypy/compare/v1.3.0...v1.4.1) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f78ff93978a..b3b464971de 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -29,7 +29,7 @@ repos: - id: isort - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.3.0 + rev: v1.4.1 hooks: - id: mypy pass_filenames: false From 51b28461d0d8b2fdf7db8a7cd2368ba11222bb6d Mon Sep 17 00:00:00 2001 From: Aaron Stephens Date: Fri, 23 Jun 2023 15:10:10 -0700 Subject: [PATCH 0437/1051] fix(backends.database): store children --- celery/backends/database/models.py | 2 ++ t/unit/backends/test_database.py | 1 + 2 files changed, 3 insertions(+) diff --git a/celery/backends/database/models.py b/celery/backends/database/models.py index 1c766b51ca4..f2a56965ccf 100644 --- a/celery/backends/database/models.py +++ b/celery/backends/database/models.py @@ -25,6 +25,7 @@ class Task(ResultModelBase): date_done = sa.Column(sa.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=True) traceback = sa.Column(sa.Text, nullable=True) + children = sa.Column(PickleType, nullable=True) def __init__(self, task_id): self.task_id = task_id @@ -36,6 +37,7 @@ def to_dict(self): 'result': self.result, 'traceback': self.traceback, 'date_done': self.date_done, + 'children': self.children, } def __repr__(self): diff --git a/t/unit/backends/test_database.py b/t/unit/backends/test_database.py index a693f383f67..a5d11b18c65 100644 --- a/t/unit/backends/test_database.py +++ b/t/unit/backends/test_database.py @@ -99,6 +99,7 @@ def test_missing_task_meta_is_dict_with_pending(self): assert meta['task_id'] == 'xxx-does-not-exist-at-all' assert meta['result'] is None assert meta['traceback'] is None + assert meta['children'] is None def test_mark_as_done(self): tb = DatabaseBackend(self.uri, app=self.app) From bd590c0965969845e3faed616475d17d468bbf47 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 28 Jun 2023 15:43:58 +0300 Subject: [PATCH 0438/1051] Stamping bugfix with group/chord header errback linking (#8347) * Added unit test test_link_error_does_not_duplicate_stamps() * group.link_error nows link to a clone of the errback * chord.link_error nows link to a clone of the errback when allow_error_cb_on_chord_header is True on the chord header * Cleanup * Added immutable=True * Fixed test_flag_allow_error_cb_on_chord_header() * Fixed test_link_error_on_chord_header() * Fixed test_link_error() * Enhanced test_link_error_does_not_duplicate_stamps() with chord body & chain cases --- celery/canvas.py | 4 +- 
t/unit/tasks/test_canvas.py | 6 +-- t/unit/tasks/test_stamping.py | 71 +++++++++++++++++++++++++++++++++++ 3 files changed, 76 insertions(+), 5 deletions(-) diff --git a/celery/canvas.py b/celery/canvas.py index c0601b2454b..4368d8f68bc 100644 --- a/celery/canvas.py +++ b/celery/canvas.py @@ -1672,7 +1672,7 @@ def link_error(self, sig): # # We return a concretised tuple of the signatures actually applied to # each child task signature, of which there might be none! - return tuple(child_task.link_error(sig) for child_task in self.tasks) + return tuple(child_task.link_error(sig.clone(immutable=True)) for child_task in self.tasks) def _prepared(self, tasks, partial_args, group_id, root_id, app, CallableSignature=abstract.CallableSignature, @@ -2273,7 +2273,7 @@ def link_error(self, errback): """ if self.app.conf.task_allow_error_cb_on_chord_header: for task in self.tasks: - task.link_error(errback) + task.link_error(errback.clone(immutable=True)) else: # Once this warning is removed, the whole method needs to be refactored to: # 1. link the error callback to each task in the header diff --git a/t/unit/tasks/test_canvas.py b/t/unit/tasks/test_canvas.py index 8f3fbd25ec0..2c3f4f12f3e 100644 --- a/t/unit/tasks/test_canvas.py +++ b/t/unit/tasks/test_canvas.py @@ -860,7 +860,7 @@ def test_link_error(self): # We expect that all group children will be given the errback to ensure # it gets called for child_sig in g1.tasks: - child_sig.link_error.assert_called_with(sig) + child_sig.link_error.assert_called_with(sig.clone(immutable=True)) def test_apply_empty(self): x = group(app=self.app) @@ -1669,7 +1669,7 @@ def test_flag_allow_error_cb_on_chord_header(self, subtests): chord_sig.link_error(errback_sig) # header for child_sig in header_mock: - child_sig.link_error.assert_called_once_with(errback_sig) + child_sig.link_error.assert_called_once_with(errback_sig.clone(immutable=True)) # body body.link_error.assert_has_calls([call(errback_sig), call(errback_sig)]) @@ -1717,7 +1717,7 @@ def test_link_error_on_chord_header(self, header): errback = c.link_error(err) assert errback == err for header_task in c.tasks: - assert header_task.options['link_error'] == [err] + assert header_task.options['link_error'] == [err.clone(immutable=True)] assert c.body.options['link_error'] == [err] diff --git a/t/unit/tasks/test_stamping.py b/t/unit/tasks/test_stamping.py index da167bd0bc3..3d139abb9e9 100644 --- a/t/unit/tasks/test_stamping.py +++ b/t/unit/tasks/test_stamping.py @@ -365,6 +365,19 @@ def return_True(*args, **kwargs): class CanvasCase: def setup_method(self): + @self.app.task(shared=False) + def identity(x): + return x + + self.identity = identity + + @self.app.task(shared=False) + def fail(*args): + args = ("Task expected to fail",) + args + raise Exception(*args) + + self.fail = fail + @self.app.task(shared=False) def add(x, y): return x + y @@ -1243,3 +1256,61 @@ def test_retry_stamping(self): sig = self.retry_task.signature_from_request() assert sig.options['stamped_headers'] == ['stamp'] assert sig.options['stamp'] == 'value' + + def test_link_error_does_not_duplicate_stamps(self, subtests): + class CustomStampingVisitor(StampingVisitor): + def on_group_start(self, group, **headers): + return {} + + def on_chain_start(self, chain, **headers): + return {} + + def on_signature(self, sig, **headers): + existing_headers = sig.options.get("headers") or {} + existing_stamps = existing_headers.get("stamps") or {} + existing_stamp = existing_stamps.get("stamp") + existing_stamp = existing_stamp or 
sig.options.get("stamp") + if existing_stamp is None: + stamp = str(uuid.uuid4()) + return {"stamp": stamp} + else: + assert False, "stamp already exists" + + def s(n, fail_flag=False): + if not fail_flag: + return self.identity.si(str(n)) + return self.fail.si(str(n)) + + def tasks(): + tasks = [] + for i in range(0, 4): + fail_flag = False + if i: + fail_flag = True + sig = s(i, fail_flag) + sig.link(s(f"link{str(i)}")) + sig.link_error(s(f"link_error{str(i)}")) + tasks.append(sig) + return tasks + + with subtests.test("group"): + canvas = group(tasks()) + canvas.link_error(s("group_link_error")) + canvas.stamp(CustomStampingVisitor()) + + with subtests.test("chord header"): + self.app.conf.task_allow_error_cb_on_chord_header = True + canvas = chord(tasks(), self.identity.si("body")) + canvas.link_error(s("group_link_error")) + canvas.stamp(CustomStampingVisitor()) + + with subtests.test("chord body"): + self.app.conf.task_allow_error_cb_on_chord_header = False + canvas = chord(tasks(), self.identity.si("body")) + canvas.link_error(s("group_link_error")) + canvas.stamp(CustomStampingVisitor()) + + with subtests.test("chain"): + canvas = chain(tasks()) + canvas.link_error(s("chain_link_error")) + canvas.stamp(CustomStampingVisitor()) From 6d7352eb1d0baa78252d96779b05c904d0b8a2e9 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 4 Jul 2023 13:25:03 +0300 Subject: [PATCH 0439/1051] [pre-commit.ci] pre-commit autoupdate (#8355) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/asottile/pyupgrade: v3.7.0 → v3.8.0](https://github.com/asottile/pyupgrade/compare/v3.7.0...v3.8.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b3b464971de..a21e0da53b5 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/asottile/pyupgrade - rev: v3.7.0 + rev: v3.8.0 hooks: - id: pyupgrade args: ["--py38-plus"] From c4a4dd8bdd50e0765cee32ed3867b27c295c64f9 Mon Sep 17 00:00:00 2001 From: zhu Date: Sat, 8 Jul 2023 12:49:29 +0800 Subject: [PATCH 0440/1051] Use argsrepr and kwargsrepr in LOG_RECEIVED (#8301) * Use argsrepr and kwargsrepr in LOG_RECEIVED * use argsrepr/kwargsrepr in success and error log * add `eta` to task received log context * remove unused import * add integration test for task trace/log * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * testing worker: pickle traceback in log queue if possible * Update celery/contrib/testing/worker.py --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Asif Saif Uddin --- celery/app/trace.py | 8 ++--- celery/contrib/testing/worker.py | 38 +++++++++++++++++++++++ celery/worker/strategy.py | 6 ++-- t/integration/test_tasks.py | 52 ++++++++++++++++++++++++++++++++ t/unit/worker/test_strategy.py | 33 ++++++++++++++++++-- 5 files changed, 128 insertions(+), 9 deletions(-) diff --git a/celery/app/trace.py b/celery/app/trace.py index 59bcb5182c0..3933d01a481 100644 --- a/celery/app/trace.py +++ b/celery/app/trace.py @@ -250,8 +250,8 @@ def _log_error(self, task, req, einfo): safe_repr(eobj), safe_str(einfo.traceback), einfo.exc_info, - safe_repr(req.args), - 
safe_repr(req.kwargs), + req.get('argsrepr') or safe_repr(req.args), + req.get('kwargsrepr') or safe_repr(req.kwargs), ) policy = get_log_policy(task, einfo, eobj) @@ -559,8 +559,8 @@ def trace_task( 'name': get_task_name(task_request, name), 'return_value': Rstr, 'runtime': T, - 'args': safe_repr(args), - 'kwargs': safe_repr(kwargs), + 'args': task_request.get('argsrepr') or safe_repr(args), + 'kwargs': task_request.get('kwargsrepr') or safe_repr(kwargs), }) # -* POST *- diff --git a/celery/contrib/testing/worker.py b/celery/contrib/testing/worker.py index d01e82c6bfc..fa8f6889682 100644 --- a/celery/contrib/testing/worker.py +++ b/celery/contrib/testing/worker.py @@ -1,4 +1,5 @@ """Embedded workers for integration tests.""" +import logging import os import threading from contextlib import contextmanager @@ -29,11 +30,48 @@ class TestWorkController(worker.WorkController): """Worker that can synchronize on being fully started.""" + logger_queue = None + def __init__(self, *args, **kwargs): # type: (*Any, **Any) -> None self._on_started = threading.Event() + super().__init__(*args, **kwargs) + if self.pool_cls.__module__.split('.')[-1] == 'prefork': + from billiard import Queue + self.logger_queue = Queue() + self.pid = os.getpid() + + try: + from tblib import pickling_support + pickling_support.install() + except ImportError: + pass + + # collect logs from forked process. + # XXX: those logs will appear twice in the live log + self.queue_listener = logging.handlers.QueueListener(self.logger_queue, logging.getLogger()) + self.queue_listener.start() + + class QueueHandler(logging.handlers.QueueHandler): + def prepare(self, record): + record.from_queue = True + # Keep origin record. + return record + + def handleError(self, record): + if logging.raiseExceptions: + raise + + def start(self): + if self.logger_queue: + handler = self.QueueHandler(self.logger_queue) + handler.addFilter(lambda r: r.process != self.pid and not getattr(r, 'from_queue', False)) + logger = logging.getLogger() + logger.addHandler(handler) + return super().start() + def on_consumer_ready(self, consumer): # type: (celery.worker.consumer.Consumer) -> None """Callback called when the Consumer blueprint is fully started.""" diff --git a/celery/worker/strategy.py b/celery/worker/strategy.py index b6e9a17c6b6..3fe5fa145ca 100644 --- a/celery/worker/strategy.py +++ b/celery/worker/strategy.py @@ -2,7 +2,6 @@ import logging from kombu.asynchronous.timer import to_timestamp -from kombu.utils.encoding import safe_repr from celery import signals from celery.app import trace as _app_trace @@ -155,8 +154,9 @@ def task_message_handler(message, body, ack, reject, callbacks, context = { 'id': req.id, 'name': req.name, - 'args': safe_repr(req.args), - 'kwargs': safe_repr(req.kwargs), + 'args': req.argsrepr, + 'kwargs': req.kwargsrepr, + 'eta': req.eta, } info(_app_trace.LOG_RECEIVED, context, extra={'data': context}) if (req.expires or req.id in revoked_tasks) and req.revoked(): diff --git a/t/integration/test_tasks.py b/t/integration/test_tasks.py index 31f6659e722..7ca09345253 100644 --- a/t/integration/test_tasks.py +++ b/t/integration/test_tasks.py @@ -1,3 +1,4 @@ +import logging import time from datetime import datetime, timedelta from time import perf_counter, sleep @@ -465,6 +466,57 @@ def test_properties(self, celery_session_worker): assert res.get(timeout=TIMEOUT)["app_id"] == "1234" +class test_trace_log_arguments: + args = "CUSTOM ARGS" + kwargs = "CUSTOM KWARGS" + + def assert_trace_log(self, caplog, result, expected): + # 
wait for logs from worker + sleep(.01) + + records = [(r.name, r.levelno, r.msg, r.data["args"], r.data["kwargs"]) + for r in caplog.records + if r.name in {'celery.worker.strategy', 'celery.app.trace'} + if r.data["id"] == result.task_id + ] + assert records == [(*e, self.args, self.kwargs) for e in expected] + + def call_task_with_reprs(self, task): + return task.set(argsrepr=self.args, kwargsrepr=self.kwargs).delay() + + @flaky + def test_task_success(self, caplog): + result = self.call_task_with_reprs(add.s(2, 2)) + value = result.get() + assert value == 4 + assert result.successful() is True + + self.assert_trace_log(caplog, result, [ + ('celery.worker.strategy', logging.INFO, + celery.app.trace.LOG_RECEIVED, + ), + ('celery.app.trace', logging.INFO, + celery.app.trace.LOG_SUCCESS, + ), + ]) + + @flaky + def test_task_failed(self, caplog): + result = self.call_task_with_reprs(fail.s(2, 2)) + with pytest.raises(ExpectedException): + result.get(timeout=5) + assert result.failed() is True + + self.assert_trace_log(caplog, result, [ + ('celery.worker.strategy', logging.INFO, + celery.app.trace.LOG_RECEIVED, + ), + ('celery.app.trace', logging.ERROR, + celery.app.trace.LOG_FAILURE, + ), + ]) + + class test_task_redis_result_backend: @pytest.fixture() def manager(self, manager): diff --git a/t/unit/worker/test_strategy.py b/t/unit/worker/test_strategy.py index 366d5c62081..30c50b98455 100644 --- a/t/unit/worker/test_strategy.py +++ b/t/unit/worker/test_strategy.py @@ -117,7 +117,7 @@ def get_request(self): if self.was_rate_limited(): return self.consumer._limit_task.call_args[0][0] if self.was_scheduled(): - return self.consumer.timer.call_at.call_args[0][0] + return self.consumer.timer.call_at.call_args[0][2][0] raise ValueError('request not handled') @contextmanager @@ -176,10 +176,23 @@ def test_log_task_received(self, caplog): for record in caplog.records: if record.msg == LOG_RECEIVED: assert record.levelno == logging.INFO + assert record.args['eta'] is None break else: raise ValueError("Expected message not in captured log records") + def test_log_eta_task_received(self, caplog): + caplog.set_level(logging.INFO, logger="celery.worker.strategy") + with self._context(self.add.s(2, 2).set(countdown=10)) as C: + C() + req = C.get_request() + for record in caplog.records: + if record.msg == LOG_RECEIVED: + assert record.args['eta'] == req.eta + break + else: + raise ValueError("Expected message not in captured log records") + def test_log_task_received_custom(self, caplog): caplog.set_level(logging.INFO, logger="celery.worker.strategy") custom_fmt = "CUSTOM MESSAGE" @@ -191,7 +204,23 @@ def test_log_task_received_custom(self, caplog): C() for record in caplog.records: if record.msg == custom_fmt: - assert set(record.args) == {"id", "name", "kwargs", "args"} + assert set(record.args) == {"id", "name", "kwargs", "args", "eta"} + break + else: + raise ValueError("Expected message not in captured log records") + + def test_log_task_arguments(self, caplog): + caplog.set_level(logging.INFO, logger="celery.worker.strategy") + args = "CUSTOM ARGS" + kwargs = "CUSTOM KWARGS" + with self._context( + self.add.s(2, 2).set(argsrepr=args, kwargsrepr=kwargs) + ) as C: + C() + for record in caplog.records: + if record.msg == LOG_RECEIVED: + assert record.args["args"] == args + assert record.args["kwargs"] == kwargs break else: raise ValueError("Expected message not in captured log records") From 22d15bec59766e5c2838039d81d62f4854a2aa5e Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" 
<66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 10 Jul 2023 17:39:06 +0000 Subject: [PATCH 0441/1051] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/asottile/pyupgrade: v3.8.0 → v3.9.0](https://github.com/asottile/pyupgrade/compare/v3.8.0...v3.9.0) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a21e0da53b5..7d59589adc6 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/asottile/pyupgrade - rev: v3.8.0 + rev: v3.9.0 hooks: - id: pyupgrade args: ["--py38-plus"] From 811ed96edbf7d7ae0681ae67ced63e6994a6e63a Mon Sep 17 00:00:00 2001 From: Daniel Wysocki Date: Tue, 11 Jul 2023 11:53:22 -0500 Subject: [PATCH 0442/1051] Fixing minor typo in code example in calling.rst (#8366) Several examples add additional arguments to `sum.add((2,2), ...)` to make the result `(2+2) + 16 = 20` instead of `2 + 2 = 4`. One example drops the additional arguments yet still returns `20`. I have corrected this to `4`. --- docs/userguide/calling.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/userguide/calling.rst b/docs/userguide/calling.rst index ed562faa4e5..e3c0f84c18c 100644 --- a/docs/userguide/calling.rst +++ b/docs/userguide/calling.rst @@ -234,7 +234,7 @@ a shortcut to set ETA by seconds into the future. >>> result = add.apply_async((2, 2), countdown=3) >>> result.get() # this takes at least 3 seconds to return - 20 + 4 The task is guaranteed to be executed at some time *after* the specified date and time, but not necessarily at that exact time. From ccc02b744c8ce4218342b32313496696f9c842d7 Mon Sep 17 00:00:00 2001 From: Takehisa Oyama <44559556+ooyamatakehisa@users.noreply.github.com> Date: Sat, 15 Jul 2023 18:25:13 +0900 Subject: [PATCH 0443/1051] add documents for timeout settings (#8373) * add statements for timeout settings * update statement --- celery/result.py | 5 ++++- docs/userguide/workers.rst | 4 +++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/celery/result.py b/celery/result.py index f66bade1d40..0c9e0a30f21 100644 --- a/celery/result.py +++ b/celery/result.py @@ -204,7 +204,10 @@ def get(self, timeout=None, propagate=True, interval=0.5, Arguments: timeout (float): How long to wait, in seconds, before the - operation times out. + operation times out. This is the setting for the publisher + (celery client) and is different from `timeout` parameter of + `@app.task`, which is the setting for the worker. The task + isn't terminated even if timeout occurs. propagate (bool): Re-raise exception if the task failed. interval (float): Time to wait (in seconds) before retrying to retrieve the result. Note that this does not have any effect diff --git a/docs/userguide/workers.rst b/docs/userguide/workers.rst index e96e80e8c7e..ede6a9881d0 100644 --- a/docs/userguide/workers.rst +++ b/docs/userguide/workers.rst @@ -573,7 +573,9 @@ time limit kills it: clean_up_in_a_hurry() Time limits can also be set using the :setting:`task_time_limit` / -:setting:`task_soft_time_limit` settings. +:setting:`task_soft_time_limit` settings. You can also specify time +limits for client side operation using ``timeout`` argument of +``AsyncResult.get()`` function. .. 
note:: From 148fecb85a833295ed64182b636140bf910f6e7f Mon Sep 17 00:00:00 2001 From: Karan Ganesan Date: Tue, 18 Jul 2023 22:11:55 +0530 Subject: [PATCH 0444/1051] fix: copyright year --- docs/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/conf.py b/docs/conf.py index 1e906935e91..83ac849e98e 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -10,7 +10,7 @@ github_project='celery/celery', author='Ask Solem & contributors', author_name='Ask Solem', - copyright='2009-2021', + copyright='2009-2023', publisher='Celery Project', html_logo='images/celery_512.png', html_favicon='images/favicon.ico', From 78ab64eb70277f1cea9cc78bbfba087e577c7b7b Mon Sep 17 00:00:00 2001 From: Enno Richter Date: Tue, 18 Jul 2023 14:17:06 +0200 Subject: [PATCH 0445/1051] setup.py: enable include_package_data --- setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.py b/setup.py index 6b0f0110bd8..f8587da92f6 100755 --- a/setup.py +++ b/setup.py @@ -149,6 +149,7 @@ def long_description(): python_requires=">=3.8", tests_require=reqs('test.txt'), extras_require=extras_require(), + include_package_data=True, entry_points={ 'console_scripts': [ 'celery = celery.__main__:main', From 1c363876147325a196c474e757e355c451a0cdff Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Oliv=C3=A9r=20Kecskem=C3=A9ty?= Date: Tue, 25 Jul 2023 09:06:39 +0200 Subject: [PATCH 0446/1051] Fix eager tasks does not populate name field (#8383) * Add task name to eager request * Add task name to eager result * Adjust tests --- celery/app/task.py | 3 ++- celery/result.py | 4 +++- t/unit/tasks/test_chord.py | 2 +- t/unit/tasks/test_result.py | 16 ++++++++-------- 4 files changed, 14 insertions(+), 11 deletions(-) diff --git a/celery/app/task.py b/celery/app/task.py index 21698f5ed6b..021a32aa8cc 100644 --- a/celery/app/task.py +++ b/celery/app/task.py @@ -788,6 +788,7 @@ def apply(self, args=None, kwargs=None, request = { 'id': task_id, + 'task': self.name, 'retries': retries, 'is_eager': True, 'logfile': logfile, @@ -824,7 +825,7 @@ def apply(self, args=None, kwargs=None, if isinstance(retval, Retry) and retval.sig is not None: return retval.sig.apply(retries=retries + 1) state = states.SUCCESS if ret.info is None else ret.info.state - return EagerResult(task_id, retval, state, traceback=tb) + return EagerResult(task_id, self.name, retval, state, traceback=tb) def AsyncResult(self, task_id, **kwargs): """Get AsyncResult instance for the specified task. 
diff --git a/celery/result.py b/celery/result.py index 0c9e0a30f21..4c12e3edde7 100644 --- a/celery/result.py +++ b/celery/result.py @@ -983,10 +983,11 @@ def restore(cls, id, backend=None, app=None): class EagerResult(AsyncResult): """Result that we know has already been executed.""" - def __init__(self, id, ret_value, state, traceback=None): + def __init__(self, id, name, ret_value, state, traceback=None): # pylint: disable=super-init-not-called # XXX should really not be inheriting from AsyncResult self.id = id + self._name = name self._result = ret_value self._state = state self._traceback = traceback @@ -1038,6 +1039,7 @@ def __repr__(self): @property def _cache(self): return { + 'name': self._name, 'task_id': self.id, 'result': self._result, 'status': self._state, diff --git a/t/unit/tasks/test_chord.py b/t/unit/tasks/test_chord.py index e44c0af4b67..acd5344d7cb 100644 --- a/t/unit/tasks/test_chord.py +++ b/t/unit/tasks/test_chord.py @@ -46,7 +46,7 @@ def join(self, propagate=True, **kwargs): def _failed_join_report(self): for value in self.value: if isinstance(value, Exception): - yield EagerResult('some_id', value, 'FAILURE') + yield EagerResult('some_id', 'test-task', value, 'FAILURE') class TSRNoReport(TSR): diff --git a/t/unit/tasks/test_result.py b/t/unit/tasks/test_result.py index 42eaab8987d..814db338f85 100644 --- a/t/unit/tasks/test_result.py +++ b/t/unit/tasks/test_result.py @@ -136,7 +136,7 @@ def test_reduce_direct(self): def test_children(self): x = self.app.AsyncResult('1') - children = [EagerResult(str(i), i, states.SUCCESS) for i in range(3)] + children = [EagerResult(str(i), 'test-task', i, states.SUCCESS) for i in range(3)] x._cache = {'children': children, 'status': states.SUCCESS} x.backend = Mock() assert x.children @@ -147,12 +147,12 @@ def test_propagates_for_parent(self): x.backend = Mock(name='backend') x.backend.get_task_meta.return_value = {} x.backend.wait_for_pending.return_value = 84 - x.parent = EagerResult(uuid(), KeyError('foo'), states.FAILURE) + x.parent = EagerResult(uuid(), 'test-task', KeyError('foo'), states.FAILURE) with pytest.raises(KeyError): x.get(propagate=True) x.backend.wait_for_pending.assert_not_called() - x.parent = EagerResult(uuid(), 42, states.SUCCESS) + x.parent = EagerResult(uuid(), 'test-task', 42, states.SUCCESS) assert x.get(propagate=True) == 84 x.backend.wait_for_pending.assert_called() @@ -172,7 +172,7 @@ def test_get_children(self): def test_build_graph_get_leaf_collect(self): x = self.app.AsyncResult('1') x.backend._cache['1'] = {'status': states.SUCCESS, 'result': None} - c = [EagerResult(str(i), i, states.SUCCESS) for i in range(3)] + c = [EagerResult(str(i), 'test-task', i, states.SUCCESS) for i in range(3)] x.iterdeps = Mock() x.iterdeps.return_value = ( (None, x), @@ -194,7 +194,7 @@ def test_build_graph_get_leaf_collect(self): def test_iterdeps(self): x = self.app.AsyncResult('1') - c = [EagerResult(str(i), i, states.SUCCESS) for i in range(3)] + c = [EagerResult(str(i), 'test-task', i, states.SUCCESS) for i in range(3)] x._cache = {'status': states.SUCCESS, 'result': None, 'children': c} for child in c: child.backend = Mock() @@ -945,13 +945,13 @@ def test_wait_raises(self): assert res.wait(propagate=False) def test_wait(self): - res = EagerResult('x', 'x', states.RETRY) + res = EagerResult('x', 'test-task', 'x', states.RETRY) res.wait() assert res.state == states.RETRY assert res.status == states.RETRY def test_forget(self): - res = EagerResult('x', 'x', states.RETRY) + res = EagerResult('x', 'test-task', 'x', 
states.RETRY) res.forget() def test_revoke(self): @@ -962,7 +962,7 @@ def test_revoke(self): def test_get_sync_subtask_option(self, task_join_will_block): task_join_will_block.return_value = True tid = uuid() - res_subtask_async = EagerResult(tid, 'x', 'x', states.SUCCESS) + res_subtask_async = EagerResult(tid, 'test-task', 'x', 'x', states.SUCCESS) with pytest.raises(RuntimeError): res_subtask_async.get() res_subtask_async.get(disable_sync_subtasks=False) From c0282a1ab70576b566c023f695b39145b4adc812 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Tue, 25 Jul 2023 19:09:08 +0600 Subject: [PATCH 0447/1051] Update test.txt dependencies (#8389) --- requirements/test.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/test.txt b/requirements/test.txt index 1ad633ce95b..cbef5bd9126 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,4 +1,4 @@ -pytest==7.3.2 +pytest==7.4.0 pytest-celery==0.0.0 pytest-subtests==0.11.0 pytest-timeout==2.1.0 @@ -7,8 +7,8 @@ pytest-order==1.1.0 boto3>=1.26.143 moto>=4.1.11 # typing extensions -mypy==1.3.0; platform_python_implementation=="CPython" -pre-commit==3.3.2 +mypy==1.4.1; platform_python_implementation=="CPython" +pre-commit==3.3.3 -r extras/yaml.txt -r extras/msgpack.txt -r extras/mongodb.txt From f5e6034f159b29baec5f37a31ccf0cbeded2dfeb Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Tue, 25 Jul 2023 19:41:07 +0600 Subject: [PATCH 0448/1051] Update auth.txt deps (#8392) --- requirements/extras/auth.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/auth.txt b/requirements/extras/auth.txt index d4a35167c7d..ba3e03ecf9e 100644 --- a/requirements/extras/auth.txt +++ b/requirements/extras/auth.txt @@ -1 +1 @@ -cryptography==41.0.1 +cryptography==41.0.2 From 7c55890214d890041a61c3076ffa9d3566f2ee0e Mon Sep 17 00:00:00 2001 From: Anders Wiklund <126193526+ycc140@users.noreply.github.com> Date: Fri, 28 Jul 2023 08:00:13 +0200 Subject: [PATCH 0449/1051] Fix backend.get_task_meta ignores the result_extended config parameter in mongodb backend (#8391) * Update mongodb.py Fix for bug report: #8387. backend.get_task_meta ignores the result_extended config parameter in mongodb backend * Update mongodb.py Reformatted code with correct indentation. * Update test_mongodb.py Added test method test_get_task_meta_for_result_extended. * Added testcase to verify pull request #8391. * Corrected whitespace. 
--------- Co-authored-by: Anders Wiklund --- celery/backends/mongodb.py | 15 +++++++++++++++ t/unit/backends/test_mongodb.py | 22 ++++++++++++++++++++++ 2 files changed, 37 insertions(+) diff --git a/celery/backends/mongodb.py b/celery/backends/mongodb.py index 21f5c89afc6..654ca3710c9 100644 --- a/celery/backends/mongodb.py +++ b/celery/backends/mongodb.py @@ -197,6 +197,21 @@ def _get_task_meta_for(self, task_id): """Get task meta-data for a task by id.""" obj = self.collection.find_one({'_id': task_id}) if obj: + if self.app.conf.find_value_for_key('extended', 'result'): + return self.meta_from_decoded({ + 'name': obj['name'], + 'args': obj['args'], + 'task_id': obj['_id'], + 'queue': obj['queue'], + 'kwargs': obj['kwargs'], + 'status': obj['status'], + 'worker': obj['worker'], + 'retries': obj['retries'], + 'children': obj['children'], + 'date_done': obj['date_done'], + 'traceback': obj['traceback'], + 'result': self.decode(obj['result']), + }) return self.meta_from_decoded({ 'task_id': obj['_id'], 'status': obj['status'], diff --git a/t/unit/backends/test_mongodb.py b/t/unit/backends/test_mongodb.py index a3b037892a9..ae0b85d22ee 100644 --- a/t/unit/backends/test_mongodb.py +++ b/t/unit/backends/test_mongodb.py @@ -428,6 +428,28 @@ def test_get_task_meta_for(self, mock_get_database): 'traceback', 'result', 'children', ])) == list(sorted(ret_val.keys())) + @patch('celery.backends.mongodb.MongoBackend._get_database') + def test_get_task_meta_for_result_extended(self, mock_get_database): + self.backend.taskmeta_collection = MONGODB_COLLECTION + + mock_database = MagicMock(spec=['__getitem__', '__setitem__']) + mock_collection = Mock() + mock_collection.find_one.return_value = MagicMock() + + mock_get_database.return_value = mock_database + mock_database.__getitem__.return_value = mock_collection + + self.app.conf.result_extended = True + ret_val = self.backend._get_task_meta_for(sentinel.task_id) + + mock_get_database.assert_called_once_with() + mock_database.__getitem__.assert_called_once_with(MONGODB_COLLECTION) + assert list(sorted([ + 'status', 'task_id', 'date_done', + 'traceback', 'result', 'children', + 'name', 'args', 'queue', 'kwargs', 'worker', 'retries', + ])) == list(sorted(ret_val.keys())) + @patch('celery.backends.mongodb.MongoBackend._get_database') def test_get_task_meta_for_no_result(self, mock_get_database): self.backend.taskmeta_collection = MONGODB_COLLECTION From 7c5e0c1b6e0a6551c271551f5d28fcb1ce7ae4f2 Mon Sep 17 00:00:00 2001 From: dpdoughe Date: Fri, 28 Jul 2023 21:05:39 -0700 Subject: [PATCH 0450/1051] Support preload options for shell and purge commands (#8374) * ENH: Clean up test for preload options * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * LNT * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * ENH: Add a TODO --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- celery/bin/purge.py | 6 ++- celery/bin/shell.py | 6 ++- t/unit/app/test_preload_cli.py | 63 +++++++++++++++++++++++++++ t/unit/bin/proj/pyramid_celery_app.py | 53 ++++++++++++++++++++++ 4 files changed, 124 insertions(+), 4 deletions(-) create mode 100644 t/unit/app/test_preload_cli.py create mode 100644 t/unit/bin/proj/pyramid_celery_app.py diff --git a/celery/bin/purge.py b/celery/bin/purge.py index 7be1a8241fb..cfb6caa9323 100644 --- a/celery/bin/purge.py +++ b/celery/bin/purge.py @@ -5,7 +5,9 @@ from celery.utils import text 
-@click.command(cls=CeleryCommand) +@click.command(cls=CeleryCommand, context_settings={ + 'allow_extra_args': True +}) @click.option('-f', '--force', cls=CeleryOption, @@ -26,7 +28,7 @@ help="Comma separated list of queues names not to purge.") @click.pass_context @handle_preload_options -def purge(ctx, force, queues, exclude_queues): +def purge(ctx, force, queues, exclude_queues, **kwargs): """Erase all messages from all known task queues. Warning: diff --git a/celery/bin/shell.py b/celery/bin/shell.py index 77b14d8a307..6c94a00870e 100644 --- a/celery/bin/shell.py +++ b/celery/bin/shell.py @@ -79,7 +79,9 @@ def _invoke_default_shell(locals): _invoke_ipython_shell(locals) -@click.command(cls=CeleryCommand) +@click.command(cls=CeleryCommand, context_settings={ + 'allow_extra_args': True +}) @click.option('-I', '--ipython', is_flag=True, @@ -117,7 +119,7 @@ def _invoke_default_shell(locals): @handle_preload_options def shell(ctx, ipython=False, bpython=False, python=False, without_tasks=False, eventlet=False, - gevent=False): + gevent=False, **kwargs): """Start shell session with convenient access to celery symbols. The following symbols will be added to the main globals: diff --git a/t/unit/app/test_preload_cli.py b/t/unit/app/test_preload_cli.py new file mode 100644 index 00000000000..a2241a1400d --- /dev/null +++ b/t/unit/app/test_preload_cli.py @@ -0,0 +1,63 @@ +from click.testing import CliRunner + +from celery.bin.celery import celery + + +def test_preload_options(isolated_cli_runner: CliRunner): + # Verify commands like shell and purge can accept preload options. + # Projects like Pyramid-Celery's ini option should be valid preload + # options. + + # TODO: Find a way to run these separate invoke and assertions + # such that order does not matter. Currently, running + # the "t.unit.bin.proj.pyramid_celery_app" first seems + # to result in cache or memoization of the option. + # As a result, the expected exception is not raised when + # the invoke on "t.unit.bin.proj.app" is run as a second + # call. + + res_without_preload = isolated_cli_runner.invoke( + celery, + ["-A", "t.unit.bin.proj.app", "purge", "-f", "--ini", "some_ini.ini"], + catch_exceptions=True, + ) + + assert "No such option: --ini" in res_without_preload.stdout + assert res_without_preload.exit_code == 2 + + res_without_preload = isolated_cli_runner.invoke( + celery, + ["-A", "t.unit.bin.proj.app", "shell", "--ini", "some_ini.ini"], + catch_exceptions=True, + ) + + assert "No such option: --ini" in res_without_preload.stdout + assert res_without_preload.exit_code == 2 + + res_with_preload = isolated_cli_runner.invoke( + celery, + [ + "-A", + "t.unit.bin.proj.pyramid_celery_app", + "purge", + "-f", + "--ini", + "some_ini.ini", + ], + catch_exceptions=True, + ) + + assert res_with_preload.exit_code == 0 + + res_with_preload = isolated_cli_runner.invoke( + celery, + [ + "-A", + "t.unit.bin.proj.pyramid_celery_app", + "shell", + "--ini", + "some_ini.ini", + ], + catch_exceptions=True, + ) + assert res_with_preload.exit_code == 0 diff --git a/t/unit/bin/proj/pyramid_celery_app.py b/t/unit/bin/proj/pyramid_celery_app.py new file mode 100644 index 00000000000..4878f95551b --- /dev/null +++ b/t/unit/bin/proj/pyramid_celery_app.py @@ -0,0 +1,53 @@ +from unittest.mock import MagicMock, Mock + +from click import Option + +from celery import Celery + +# This module defines a mocked Celery application to replicate +# the behavior of Pyramid-Celery's configuration by preload options. 
+# Preload options should propagate to commands like shell and purge etc. +# +# The Pyramid-Celery project https://github.com/sontek/pyramid_celery +# assumes that you want to configure Celery via an ini settings file. +# The .ini files are the standard configuration file for Pyramid +# applications. +# See https://docs.pylonsproject.org/projects/pyramid/en/latest/quick_tutorial/ini.html +# + +app = Celery(set_as_current=False) +app.config_from_object("t.integration.test_worker_config") + + +class PurgeMock: + def queue_purge(self, queue): + return 0 + + +class ConnMock: + default_channel = PurgeMock() + channel_errors = KeyError + + +mock = Mock() +mock.__enter__ = Mock(return_value=ConnMock()) +mock.__exit__ = Mock(return_value=False) + +app.connection_for_write = MagicMock(return_value=mock) + +# Below are taken from pyramid-celery's __init__.py +# Ref: https://github.com/sontek/pyramid_celery/blob/cf8aa80980e42f7235ad361874d3c35e19963b60/pyramid_celery/__init__.py#L25-L36 # noqa: E501 +ini_option = Option( + ( + "--ini", + "-i", + ), + help="Paste ini configuration file.", +) + +ini_var_option = Option( + ("--ini-var",), help="Comma separated list of key=value to pass to ini." +) + +app.user_options["preload"].add(ini_option) +app.user_options["preload"].add(ini_var_option) From f34b1da5d5a5b16c0610e55e3cfefb60e31746e5 Mon Sep 17 00:00:00 2001 From: "Jeremy Z. Othieno" <1661621+othieno@users.noreply.github.com> Date: Sat, 29 Jul 2023 06:11:10 +0200 Subject: [PATCH 0451/1051] Implement safer ArangoDB queries (#8351) * Update pyArango>=2.0.2 * Implement safer ArangoDB queries The AQL queries used in the ArangoDbBackend's implementation are potentially vulnerable to injections because no sanity checks are performed on the arguments used to build the query strings. This is particularly evident when using a database collection with a dash in its name, e.g. "celery-task-results". The query string generated by the set method is 'INSERT {task: v}, _key: "k"} INTO celery-task-results' instead of 'INSERT {task: v}, _key: "k"} INTO `celery-task-results`' (backticks surrounding collection name). The former is evaluated as a substraction (celery - task - results) and is therefore an illegal collection name, while the latter is evaluated as a string. This commit re-implements the setter and getters using bind parameters[1], which performs the necessary safety checks. Furthermore, the new query used in the set method accounts for updates to existing keys, resolving #7039. [1] https://www.arangodb.com/docs/stable/aql/fundamentals-bind-parameters.html * Remove unused imports * Improve tests for the ArangoDbBackend.get method * Improve tests for the ArangoDbBackend.cleanup method * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * Improve tests for the ArangoDbBackend.delete method * Add some tests for the ArangoDbBackend.mget method * Add tests for the ArangoDbBackend.set method * Add more tests for the ArangoDbBackend.connection property * Add more tests for the ArangoDbBackend.mget method * Add more tests for the ArangoDbBackend.__init__ method * Update CONTRIBUTORS.txt --------- Co-authored-by: Jeremy Z. 
Othieno Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- CONTRIBUTORS.txt | 1 + celery/backends/arangodb.py | 164 ++++++++++++------------------- requirements/extras/arangodb.txt | 2 +- t/unit/backends/test_arangodb.py | 158 +++++++++++++++++++++++------ 4 files changed, 192 insertions(+), 133 deletions(-) diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index e8c1dec868b..acf30fe403b 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -292,3 +292,4 @@ Oleh Romanovskyi, 2022/06/09 JoonHwan Kim, 2022/08/01 Kaustav Banerjee, 2022/11/10 Austin Snoeyink 2022/12/06 +Jeremy Z. Othieno 2023/07/27 diff --git a/celery/backends/arangodb.py b/celery/backends/arangodb.py index d456d0fc4da..cc9cc48d141 100644 --- a/celery/backends/arangodb.py +++ b/celery/backends/arangodb.py @@ -2,8 +2,6 @@ # pylint: disable=W1202,W0703 -import json -import logging from datetime import timedelta from kombu.utils.objects import cached_property @@ -117,116 +115,76 @@ def db(self): @cached_property def expires_delta(self): - return timedelta(seconds=self.expires) + return timedelta(seconds=0 if self.expires is None else self.expires) def get(self, key): - try: - logging.debug( - 'RETURN DOCUMENT("{collection}/{key}").task'.format( - collection=self.collection, key=key - ) - ) - query = self.db.AQLQuery( - 'RETURN DOCUMENT("{collection}/{key}").task'.format( - collection=self.collection, key=key - ) - ) - result = query.response["result"][0] - if result is None: - return None - return json.dumps(result) - except AQLQueryError as aql_err: - logging.error(aql_err) - return None - except Exception as err: - logging.error(err) + if key is None: return None + query = self.db.AQLQuery( + "RETURN DOCUMENT(@@collection, @key).task", + rawResults=True, + bindVars={ + "@collection": self.collection, + "key": key, + }, + ) + return next(query) if len(query) > 0 else None def set(self, key, value): - """Insert a doc with value into task attribute and _key as key.""" - try: - logging.debug( - 'INSERT {{ task: {task}, _key: "{key}" }} INTO {collection}' - .format( - collection=self.collection, key=key, task=value - ) - ) - self.db.AQLQuery( - 'INSERT {{ task: {task}, _key: "{key}" }} INTO {collection}' - .format( - collection=self.collection, key=key, task=value - ) - ) - except AQLQueryError as aql_err: - logging.error(aql_err) - except Exception as err: - logging.error(err) + self.db.AQLQuery( + """ + UPSERT {_key: @key} + INSERT {_key: @key, task: @value} + UPDATE {task: @value} IN @@collection + """, + bindVars={ + "@collection": self.collection, + "key": key, + "value": value, + }, + ) def mget(self, keys): - try: - json_keys = json.dumps(keys) - logging.debug( - """ - FOR key in {keys} - RETURN DOCUMENT(CONCAT("{collection}/", key)).task - """.format( - collection=self.collection, keys=json_keys - ) - ) - query = self.db.AQLQuery( - """ - FOR key in {keys} - RETURN DOCUMENT(CONCAT("{collection}/", key)).task - """.format( - collection=self.collection, keys=json_keys - ) - ) - results = [] - while True: - results.extend(query.response['result']) + if keys is None: + return + query = self.db.AQLQuery( + "FOR k IN @keys RETURN DOCUMENT(@@collection, k).task", + rawResults=True, + bindVars={ + "@collection": self.collection, + "keys": keys if isinstance(keys, list) else list(keys), + }, + ) + while True: + yield from query + try: query.nextBatch() - except StopIteration: - values = [ - result if result is None else json.dumps(result) - for result in results - ] - return values - except 
AQLQueryError as aql_err: - logging.error(aql_err) - return [None] * len(keys) - except Exception as err: - logging.error(err) - return [None] * len(keys) + except StopIteration: + break def delete(self, key): - try: - logging.debug( - 'REMOVE {{ _key: "{key}" }} IN {collection}'.format( - key=key, collection=self.collection - ) - ) - self.db.AQLQuery( - 'REMOVE {{ _key: "{key}" }} IN {collection}'.format( - key=key, collection=self.collection - ) - ) - except AQLQueryError as aql_err: - logging.error(aql_err) - except Exception as err: - logging.error(err) + if key is None: + return + self.db.AQLQuery( + "REMOVE {_key: @key} IN @@collection", + bindVars={ + "@collection": self.collection, + "key": key, + }, + ) def cleanup(self): - """Delete expired meta-data.""" - remove_before = (self.app.now() - self.expires_delta).isoformat() - try: - query = ( - 'FOR item IN {collection} ' - 'FILTER item.task.date_done < "{remove_before}" ' - 'REMOVE item IN {collection}' - ).format(collection=self.collection, remove_before=remove_before) - logging.debug(query) - self.db.AQLQuery(query) - except AQLQueryError as aql_err: - logging.error(aql_err) - except Exception as err: - logging.error(err) + if not self.expires: + return + checkpoint = (self.app.now() - self.expires_delta).isoformat() + self.db.AQLQuery( + """ + FOR record IN @@collection + FILTER record.task.date_done < @checkpoint + REMOVE record IN @@collection + """, + bindVars={ + "@collection": self.collection, + "checkpoint": checkpoint, + }, + ) diff --git a/requirements/extras/arangodb.txt b/requirements/extras/arangodb.txt index f081bacacfe..096d6a1c92b 100644 --- a/requirements/extras/arangodb.txt +++ b/requirements/extras/arangodb.txt @@ -1 +1 @@ -pyArango>=2.0.1 +pyArango>=2.0.2 diff --git a/t/unit/backends/test_arangodb.py b/t/unit/backends/test_arangodb.py index c35fb162c78..8e86f09b67c 100644 --- a/t/unit/backends/test_arangodb.py +++ b/t/unit/backends/test_arangodb.py @@ -1,6 +1,6 @@ """Tests for the ArangoDb.""" import datetime -from unittest.mock import Mock, patch, sentinel +from unittest.mock import MagicMock, Mock, patch, sentinel import pytest @@ -39,29 +39,118 @@ def test_init_settings_is_None(self): self.app.conf.arangodb_backend_settings = None ArangoDbBackend(app=self.app) + def test_init_url(self): + url = None + expected_database = "celery" + expected_collection = "celery" + backend = ArangoDbBackend(app=self.app, url=url) + assert backend.database == expected_database + assert backend.collection == expected_collection + + url = "arangodb://localhost:27017/celery-database/celery-collection" + expected_database = "celery-database" + expected_collection = "celery-collection" + backend = ArangoDbBackend(app=self.app, url=url) + assert backend.database == expected_database + assert backend.collection == expected_collection + def test_get_connection_connection_exists(self): with patch('pyArango.connection.Connection') as mock_Connection: - self.backend._connection = sentinel._connection - - connection = self.backend._connection - - assert sentinel._connection == connection + self.backend._connection = sentinel.connection + connection = self.backend.connection + assert connection == sentinel.connection mock_Connection.assert_not_called() + expected_connection = mock_Connection() + mock_Connection.reset_mock() # So the assert_called_once below is accurate. 
+ self.backend._connection = None + connection = self.backend.connection + assert connection == expected_connection + mock_Connection.assert_called_once() + def test_get(self): - self.app.conf.arangodb_backend_settings = {} - x = ArangoDbBackend(app=self.app) - x.get = Mock() - x.get.return_value = sentinel.retval - assert x.get('1f3fab') == sentinel.retval - x.get.assert_called_once_with('1f3fab') + self.backend._connection = MagicMock(spec=["__getitem__"]) + + assert self.backend.get(None) is None + self.backend.db.AQLQuery.assert_not_called() + + assert self.backend.get(sentinel.task_id) is None + self.backend.db.AQLQuery.assert_called_once_with( + "RETURN DOCUMENT(@@collection, @key).task", + rawResults=True, + bindVars={ + "@collection": self.backend.collection, + "key": sentinel.task_id, + }, + ) + + self.backend.get = Mock(return_value=sentinel.retval) + assert self.backend.get(sentinel.task_id) == sentinel.retval + self.backend.get.assert_called_once_with(sentinel.task_id) + + def test_set(self): + self.backend._connection = MagicMock(spec=["__getitem__"]) + + assert self.backend.set(sentinel.key, sentinel.value) is None + self.backend.db.AQLQuery.assert_called_once_with( + """ + UPSERT {_key: @key} + INSERT {_key: @key, task: @value} + UPDATE {task: @value} IN @@collection + """, + bindVars={ + "@collection": self.backend.collection, + "key": sentinel.key, + "value": sentinel.value, + }, + ) + + def test_mget(self): + self.backend._connection = MagicMock(spec=["__getitem__"]) + + result = list(self.backend.mget(None)) + expected_result = [] + assert result == expected_result + self.backend.db.AQLQuery.assert_not_called() + + Query = MagicMock(spec=pyArango.query.Query) + query = Query() + query.nextBatch = MagicMock(side_effect=StopIteration()) + self.backend.db.AQLQuery = Mock(return_value=query) + + keys = [sentinel.task_id_0, sentinel.task_id_1] + result = list(self.backend.mget(keys)) + expected_result = [] + assert result == expected_result + self.backend.db.AQLQuery.assert_called_once_with( + "FOR k IN @keys RETURN DOCUMENT(@@collection, k).task", + rawResults=True, + bindVars={ + "@collection": self.backend.collection, + "keys": keys, + }, + ) + + values = [sentinel.value_0, sentinel.value_1] + query.__iter__.return_value = iter([sentinel.value_0, sentinel.value_1]) + result = list(self.backend.mget(keys)) + expected_result = values + assert result == expected_result def test_delete(self): - self.app.conf.arangodb_backend_settings = {} - x = ArangoDbBackend(app=self.app) - x.delete = Mock() - x.delete.return_value = None - assert x.delete('1f3fab') is None + self.backend._connection = MagicMock(spec=["__getitem__"]) + + assert self.backend.delete(None) is None + self.backend.db.AQLQuery.assert_not_called() + + assert self.backend.delete(sentinel.task_id) is None + self.backend.db.AQLQuery.assert_called_once_with( + "REMOVE {_key: @key} IN @@collection", + bindVars={ + "@collection": self.backend.collection, + "key": sentinel.task_id, + }, + ) def test_config_params(self): self.app.conf.arangodb_backend_settings = { @@ -111,18 +200,29 @@ def test_backend_params_by_url(self): assert x.verify is False def test_backend_cleanup(self): - now = datetime.datetime.utcnow() - self.backend.app.now = Mock(return_value=now) - self.backend._connection = { - 'celery': Mock(), - } + self.backend._connection = MagicMock(spec=["__getitem__"]) + self.backend.expires = None self.backend.cleanup() + self.backend.db.AQLQuery.assert_not_called() - expected_date = (now - 
self.backend.expires_delta).isoformat() - expected_query = ( - 'FOR item IN celery ' - 'FILTER item.task.date_done < "{date}" ' - 'REMOVE item IN celery' - ).format(date=expected_date) - self.backend.db.AQLQuery.assert_called_once_with(expected_query) + self.backend.expires = 0 + self.backend.cleanup() + self.backend.db.AQLQuery.assert_not_called() + + now = datetime.datetime.utcnow() + self.backend.app.now = Mock(return_value=now) + self.backend.expires = 86400 + expected_checkpoint = (now - self.backend.expires_delta).isoformat() + self.backend.cleanup() + self.backend.db.AQLQuery.assert_called_once_with( + """ + FOR record IN @@collection + FILTER record.task.date_done < @checkpoint + REMOVE record IN @@collection + """, + bindVars={ + "@collection": self.backend.collection, + "checkpoint": expected_checkpoint, + }, + ) From f3e2e87e68ed561e2c13d031ca3bdbe7fc99ba12 Mon Sep 17 00:00:00 2001 From: zhu Date: Sat, 29 Jul 2023 12:17:00 +0800 Subject: [PATCH 0452/1051] integration test: cleanup worker after test case (#8361) --- celery/contrib/testing/manager.py | 23 +++++++++++++++++++++++ t/integration/conftest.py | 4 +++- t/integration/test_canvas.py | 7 +++---- t/integration/test_tasks.py | 26 +------------------------- 4 files changed, 30 insertions(+), 30 deletions(-) diff --git a/celery/contrib/testing/manager.py b/celery/contrib/testing/manager.py index 28f05716079..23f43b160f8 100644 --- a/celery/contrib/testing/manager.py +++ b/celery/contrib/testing/manager.py @@ -6,6 +6,7 @@ from itertools import count from typing import Any, Callable, Dict, Sequence, TextIO, Tuple # noqa +from kombu.exceptions import ContentDisallowed from kombu.utils.functional import retry_over_time from celery import states @@ -207,6 +208,28 @@ def true_or_raise(self, fun, *args, **kwargs): raise Sentinel() return res + def wait_until_idle(self): + control = self.app.control + with self.app.connection() as connection: + # Try to purge the queue before we start + # to attempt to avoid interference from other tests + while True: + count = control.purge(connection=connection) + if count == 0: + break + + # Wait until worker is idle + inspect = control.inspect() + inspect.connection = connection + while True: + try: + count = sum(len(t) for t in inspect.active().values()) + except ContentDisallowed: + # test_security_task_done may trigger this exception + break + if count == 0: + break + class Manager(ManagerMixin): """Test helpers for task integration tests.""" diff --git a/t/integration/conftest.py b/t/integration/conftest.py index 34a705b2be5..550bd5d37ba 100644 --- a/t/integration/conftest.py +++ b/t/integration/conftest.py @@ -75,7 +75,9 @@ def app(celery_app): @pytest.fixture def manager(app, celery_session_worker): - return Manager(app) + manager = Manager(app) + yield manager + manager.wait_until_idle() @pytest.fixture(autouse=True) diff --git a/t/integration/test_canvas.py b/t/integration/test_canvas.py index 78105d7ef9e..56266c5075c 100644 --- a/t/integration/test_canvas.py +++ b/t/integration/test_canvas.py @@ -153,8 +153,8 @@ def test_link_error_callback_error_callback_retries_eager(self): ) assert result.get(timeout=TIMEOUT, propagate=False) == exception - @pytest.mark.xfail(raises=TimeoutError, reason="Task is timeout instead of returning exception") - def test_link_error_callback_retries(self): + @flaky + def test_link_error_callback_retries(self, manager): exception = ExpectedException("Task expected to fail", "test") result = fail.apply_async( args=("test",), @@ -173,8 +173,7 @@ def 
test_link_error_using_signature_eager(self): assert (fail.apply().get(timeout=TIMEOUT, propagate=False), True) == ( exception, True) - @pytest.mark.xfail(raises=TimeoutError, reason="Task is timeout instead of returning exception") - def test_link_error_using_signature(self): + def test_link_error_using_signature(self, manager): fail = signature('t.integration.tasks.fail', args=("test",)) retrun_exception = signature('t.integration.tasks.return_exception') diff --git a/t/integration/test_tasks.py b/t/integration/test_tasks.py index 7ca09345253..f11314c6f9e 100644 --- a/t/integration/test_tasks.py +++ b/t/integration/test_tasks.py @@ -203,13 +203,6 @@ def test_revoked(self, manager): def test_revoked_by_headers_simple_canvas(self, manager): """Testing revoking of task using a stamped header""" - # Try to purge the queue before we start - # to attempt to avoid interference from other tests - while True: - count = manager.app.control.purge() - if count == 0: - break - target_monitoring_id = uuid4().hex class MonitoringIdStampingVisitor(StampingVisitor): @@ -243,13 +236,6 @@ def on_signature(self, sig, **headers) -> dict: # not match the task's stamps, allowing those tasks to proceed successfully. worker_state.revoked_stamps.clear() - # Try to purge the queue after we're done - # to attempt to avoid interference to other tests - while True: - count = manager.app.control.purge() - if count == 0: - break - def test_revoked_by_headers_complex_canvas(self, manager, subtests): """Testing revoking of task using a stamped header""" try: @@ -261,10 +247,7 @@ def test_revoked_by_headers_complex_canvas(self, manager, subtests): # Try to purge the queue before we start # to attempt to avoid interference from other tests - while True: - count = manager.app.control.purge() - if count == 0: - break + manager.wait_until_idle() target_monitoring_id = isinstance(monitoring_id, list) and monitoring_id[0] or monitoring_id @@ -303,13 +286,6 @@ def on_signature(self, sig, **headers) -> dict: assert result.successful() is False worker_state.revoked_stamps.clear() - # Try to purge the queue after we're done - # to attempt to avoid interference to other tests - while True: - count = manager.app.control.purge() - if count == 0: - break - @flaky def test_wrong_arguments(self, manager): """Tests that proper exceptions are raised when task is called with wrong arguments.""" From 87b3617be4b320017c504aed9c39e97071f822c0 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sun, 30 Jul 2023 17:36:14 +0300 Subject: [PATCH 0453/1051] Added "Tomer Nosrati" to CONTRIBUTORS.txt (#8400) --- CONTRIBUTORS.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index acf30fe403b..4b48c1f9b1f 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -289,6 +289,7 @@ kronion, 2021/08/26 Gabor Boros, 2021/11/09 Tizian Seehaus, 2022/02/09 Oleh Romanovskyi, 2022/06/09 +Tomer Nosrati, 2022/07/17 JoonHwan Kim, 2022/08/01 Kaustav Banerjee, 2022/11/10 Austin Snoeyink 2022/12/06 From 9dfe189dfbee4badf243ac6a6e92031cdc3cf10b Mon Sep 17 00:00:00 2001 From: Sourabh Ligade <65074119+sourabhligade@users.noreply.github.com> Date: Tue, 1 Aug 2023 17:30:27 +0530 Subject: [PATCH 0454/1051] Update README.rst (#8404) Corrected a grammatical error: removed an extra comma that had been added before the colon.
--- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 71d07309f1a..fec138c332d 100644 --- a/README.rst +++ b/README.rst @@ -363,7 +363,7 @@ Download the latest version of Celery from PyPI: https://pypi.org/project/celery/ -You can install it by doing the following,: +You can install it by doing the following: :: From 2cde29d9fb6a8f8f805bec5d97b36bc930bcb52f Mon Sep 17 00:00:00 2001 From: Sourabh Ligade <65074119+sourabhligade@users.noreply.github.com> Date: Tue, 1 Aug 2023 20:40:51 +0530 Subject: [PATCH 0455/1051] Update README.rst (#8408) Fixed a grammatical error: since this sentence introduces a bulleted list like the other sentences, it should end with a colon rather than a comma, for better readability and punctuation. --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index fec138c332d..d673c941ff9 100644 --- a/README.rst +++ b/README.rst @@ -58,7 +58,7 @@ in such a way that the client enqueues an URL to be requested by a worker. What do I need? =============== -Celery version 5.3.1 runs on, +Celery version 5.3.1 runs on: - Python (3.8, 3.9, 3.10, 3.11) - PyPy3.8+ (v7.3.11+) From ef50442ffc6df73e92beb638dea841f72636cb17 Mon Sep 17 00:00:00 2001 From: Mathieu Kniewallner Date: Mon, 7 Aug 2023 21:15:13 +0200 Subject: [PATCH 0456/1051] fix(canvas): add group index when unrolling tasks (#8427) * test(integration): add test to assert `chord` order This integration test ensures that task results are received by the callback in the same order as they were created. * fix(canvas): add group index when unrolling tasks When using `chord`, task results are not received in the order they were created. Setting the group index when unrolling tasks ensures that they are. --- celery/canvas.py | 4 ++-- t/integration/test_canvas.py | 12 ++++++++++++ 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/celery/canvas.py b/celery/canvas.py index 4368d8f68bc..b0283657855 100644 --- a/celery/canvas.py +++ b/celery/canvas.py @@ -1704,7 +1704,7 @@ def _prepared(self, tasks, partial_args, group_id, root_id, app, generator: A generator for the unrolled group tasks. The generator yields tuples of the form ``(task, AsyncResult, group_id)``. """ - for task in tasks: + for index, task in enumerate(tasks): if isinstance(task, CallableSignature): # local sigs are always of type Signature, and we # clone them to make sure we don't modify the originals.
@@ -1721,7 +1721,7 @@ def _prepared(self, tasks, partial_args, group_id, root_id, app, else: if partial_args and not task.immutable: task.args = tuple(partial_args) + tuple(task.args) - yield task, task.freeze(group_id=group_id, root_id=root_id), group_id + yield task, task.freeze(group_id=group_id, root_id=root_id, group_index=index), group_id def _apply_tasks(self, tasks, producer=None, app=None, p=None, add_to_parent=None, chord=None, diff --git a/t/integration/test_canvas.py b/t/integration/test_canvas.py index 56266c5075c..0c378f6785d 100644 --- a/t/integration/test_canvas.py +++ b/t/integration/test_canvas.py @@ -1519,6 +1519,18 @@ def apply_chord_incr_with_sleep(self, *args, **kwargs): result = c() assert result.get(timeout=TIMEOUT) == 4 + def test_chord_order(self, manager): + try: + manager.app.backend.ensure_chords_allowed() + except NotImplementedError as e: + raise pytest.skip(e.args[0]) + + inputs = [i for i in range(10)] + + c = chord((identity.si(i) for i in inputs), identity.s()) + result = c() + assert result.get() == inputs + @pytest.mark.xfail(reason="async_results aren't performed in async way") def test_redis_subscribed_channels_leak(self, manager): if not manager.app.conf.result_backend.startswith('redis'): From d1273afd01bd76006606b2c4c65a3e45e29912a3 Mon Sep 17 00:00:00 2001 From: Ben Robinson Date: Tue, 8 Aug 2023 16:43:42 -0400 Subject: [PATCH 0457/1051] fix(beat): debug statement should only log AsyncResult.id if it exists (#8428) --- celery/beat.py | 5 ++++- t/unit/app/test_beat.py | 23 +++++++++++++++++++++++ 2 files changed, 27 insertions(+), 1 deletion(-) diff --git a/celery/beat.py b/celery/beat.py index a3d13adafb3..76e44721e14 100644 --- a/celery/beat.py +++ b/celery/beat.py @@ -282,7 +282,10 @@ def apply_entry(self, entry, producer=None): error('Message Error: %s\n%s', exc, traceback.format_stack(), exc_info=True) else: - debug('%s sent. id->%s', entry.task, result.id) + if result and hasattr(result, 'id'): + debug('%s sent. id->%s', entry.task, result.id) + else: + debug('%s sent.', entry.task) def adjust(self, n, drift=-0.010): if n and n > 0: diff --git a/t/unit/app/test_beat.py b/t/unit/app/test_beat.py index 64dad3e8f2d..082aeb3a5ef 100644 --- a/t/unit/app/test_beat.py +++ b/t/unit/app/test_beat.py @@ -301,6 +301,29 @@ def test_info(self): scheduler = mScheduler(app=self.app) assert isinstance(scheduler.info, str) + def test_apply_entry_handles_empty_result(self): + s = mScheduler(app=self.app) + entry = s.Entry(name='a name', task='foo', app=self.app) + + with patch.object(s, 'apply_async') as mock_apply_async: + with patch("celery.beat.debug") as mock_debug: + mock_apply_async.return_value = None + s.apply_entry(entry) + mock_debug.assert_called_once_with('%s sent.', entry.task) + + with patch.object(s, 'apply_async') as mock_apply_async: + with patch("celery.beat.debug") as mock_debug: + mock_apply_async.return_value = object() + s.apply_entry(entry) + mock_debug.assert_called_once_with('%s sent.', entry.task) + + task_id = 'taskId123456' + with patch.object(s, 'apply_async') as mock_apply_async: + with patch("celery.beat.debug") as mock_debug: + mock_apply_async.return_value = self.app.AsyncResult(task_id) + s.apply_entry(entry) + mock_debug.assert_called_once_with('%s sent. 
id->%s', entry.task, task_id) + def test_maybe_entry(self): s = mScheduler(app=self.app) entry = s.Entry(name='add every', task='tasks.add', app=self.app) From 7b4c4c3938385a994c346f6fa80ce87f4efc0001 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 9 Aug 2023 01:17:58 +0300 Subject: [PATCH 0458/1051] Lint fixes & pre-commit autoupdate (#8414) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Fixed lint issues * - [github.com/asottile/pyupgrade: v3.9.0 → v3.10.1](https://github.com/asottile/pyupgrade/compare/v3.9.0...v3.10.1) - [github.com/PyCQA/flake8: 6.0.0 → 6.1.0](https://github.com/PyCQA/flake8/compare/6.0.0...6.1.0) * Excluded celery/app/task.py and celery/backends/cache.py from yesqa pre-commit --- .pre-commit-config.yaml | 5 +++-- celery/app/task.py | 2 +- celery/backends/cache.py | 2 +- celery/utils/term.py | 2 +- t/unit/backends/test_mongodb.py | 4 ++-- 5 files changed, 8 insertions(+), 7 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 7d59589adc6..322b3d91fd5 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,12 +1,12 @@ repos: - repo: https://github.com/asottile/pyupgrade - rev: v3.9.0 + rev: v3.10.1 hooks: - id: pyupgrade args: ["--py38-plus"] - repo: https://github.com/PyCQA/flake8 - rev: 6.0.0 + rev: 6.1.0 hooks: - id: flake8 @@ -14,6 +14,7 @@ repos: rev: v1.5.0 hooks: - id: yesqa + exclude: ^celery/app/task\.py$|^celery/backends/cache\.py$ - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.4.0 diff --git a/celery/app/task.py b/celery/app/task.py index 021a32aa8cc..de290ae6035 100644 --- a/celery/app/task.py +++ b/celery/app/task.py @@ -1127,7 +1127,7 @@ def _get_exec_options(self): return self._exec_options @property - def backend(self): + def backend(self): # noqa: F811 backend = self._backend if backend is None: return self.app.backend diff --git a/celery/backends/cache.py b/celery/backends/cache.py index 7d17837ffd7..ad79383c455 100644 --- a/celery/backends/cache.py +++ b/celery/backends/cache.py @@ -47,7 +47,7 @@ def get_best_memcache(*args, **kwargs): Client = _Client = memcache.Client if not is_pylibmc: - def Client(*args, **kwargs): + def Client(*args, **kwargs): # noqa: F811 kwargs.pop('behaviors', None) return _Client(*args, **kwargs) diff --git a/celery/utils/term.py b/celery/utils/term.py index d7ab5cae625..a2eff996333 100644 --- a/celery/utils/term.py +++ b/celery/utils/term.py @@ -168,7 +168,7 @@ def supports_images(): def _read_as_base64(path): with codecs.open(path, mode='rb') as fh: encoded = base64.b64encode(fh.read()) - return encoded if type(encoded) == 'str' else encoded.decode('ascii') + return encoded if isinstance(encoded, str) else encoded.decode('ascii') def imgcat(path, inline=1, preserve_aspect_ratio=0, **kwargs): diff --git a/t/unit/backends/test_mongodb.py b/t/unit/backends/test_mongodb.py index ae0b85d22ee..6f74b42125f 100644 --- a/t/unit/backends/test_mongodb.py +++ b/t/unit/backends/test_mongodb.py @@ -730,7 +730,7 @@ def test_encode_success_results(self, mongo_backend_factory, serializer, backend = mongo_backend_factory(serializer=serializer) backend.store_result(TASK_ID, result, 'SUCCESS') recovered = backend.get_result(TASK_ID) - assert type(recovered) == result_type + assert isinstance(recovered, result_type) assert recovered == result @pytest.mark.parametrize("serializer", @@ -754,5 +754,5 @@ def test_encode_exception_error_results(self, mongo_backend_factory, traceback = 'Traceback:\n Exception: Basic Exception\n' 
backend.store_result(TASK_ID, exception, 'FAILURE', traceback) recovered = backend.get_result(TASK_ID) - assert type(recovered) == type(exception) + assert isinstance(recovered, type(exception)) assert recovered.args == exception.args From b33df947f2a57cca6a3c9043f144ac26a72a08a4 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Sun, 13 Aug 2023 13:24:37 +0600 Subject: [PATCH 0459/1051] Update auth.txt (#8435) --- requirements/extras/auth.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/auth.txt b/requirements/extras/auth.txt index ba3e03ecf9e..988a9e635d9 100644 --- a/requirements/extras/auth.txt +++ b/requirements/extras/auth.txt @@ -1 +1 @@ -cryptography==41.0.2 +cryptography==41.0.3 From 53366a3c66a7609f3c0792699fd966d949385e5d Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 14 Aug 2023 17:45:12 +0000 Subject: [PATCH 0460/1051] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/mirrors-mypy: v1.4.1 → v1.5.0](https://github.com/pre-commit/mirrors-mypy/compare/v1.4.1...v1.5.0) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 322b3d91fd5..85418a758cb 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -30,7 +30,7 @@ repos: - id: isort - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.4.1 + rev: v1.5.0 hooks: - id: mypy pass_filenames: false From a08091c2df2554b497157976d0ac908cf5a6f0a4 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Tue, 15 Aug 2023 10:30:21 +0600 Subject: [PATCH 0461/1051] Update mypy on test.txt (#8438) --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index cbef5bd9126..f31cf7888f5 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -7,7 +7,7 @@ pytest-order==1.1.0 boto3>=1.26.143 moto>=4.1.11 # typing extensions -mypy==1.4.1; platform_python_implementation=="CPython" +mypy==1.5.0; platform_python_implementation=="CPython" pre-commit==3.3.3 -r extras/yaml.txt -r extras/msgpack.txt From 7d31b43fb4ccd1e99593eed7497c0a654c9b97b1 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Tue, 15 Aug 2023 15:49:32 +0600 Subject: [PATCH 0462/1051] added missing kwargs arguments in some cli cmd (#8049) --- celery/bin/celery.py | 2 +- celery/bin/multi.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/celery/bin/celery.py b/celery/bin/celery.py index dfe8c7f2d60..15558813b0b 100644 --- a/celery/bin/celery.py +++ b/celery/bin/celery.py @@ -173,7 +173,7 @@ def celery(ctx, app, broker, result_backend, loader, config, workdir, @celery.command(cls=CeleryCommand) @click.pass_context -def report(ctx): +def report(ctx, **kwargs): """Shows information useful to include in bug-reports.""" app = ctx.obj.app app.loader.import_default_modules() diff --git a/celery/bin/multi.py b/celery/bin/multi.py index 3a9e026b88a..360c38693a8 100644 --- a/celery/bin/multi.py +++ b/celery/bin/multi.py @@ -469,7 +469,7 @@ def DOWN(self): ) @click.pass_context @handle_preload_options -def multi(ctx): +def multi(ctx, **kwargs): """Start multiple worker instances.""" cmd = MultiTool(quiet=ctx.obj.quiet, no_color=ctx.obj.no_color) # In 4.x, celery multi ignores the global --app option. 
From 5754f14542ffff286206cc29cbd52d2a0d463ab5 Mon Sep 17 00:00:00 2001 From: Thomas Fossati Date: Wed, 16 Aug 2023 13:41:33 +0200 Subject: [PATCH 0463/1051] Fix #8431: Set format_date to False when calling _get_result_meta on mongo backend (#8432) * Fix #8431: Set format_date to False when calling _get_result_meta on mongo backend * Add testcase on _get_result_meta format_date for PR #8391 * #8432 : Fix lint error E721 --- celery/backends/mongodb.py | 3 ++- t/unit/backends/test_base.py | 24 ++++++++++++++++++++++++ 2 files changed, 26 insertions(+), 1 deletion(-) diff --git a/celery/backends/mongodb.py b/celery/backends/mongodb.py index 654ca3710c9..c64fe380807 100644 --- a/celery/backends/mongodb.py +++ b/celery/backends/mongodb.py @@ -182,7 +182,8 @@ def _store_result(self, task_id, result, state, traceback=None, request=None, **kwargs): """Store return value and state of an executed task.""" meta = self._get_result_meta(result=self.encode(result), state=state, - traceback=traceback, request=request) + traceback=traceback, request=request, + format_date=False) # Add the _id for mongodb meta['_id'] = task_id diff --git a/t/unit/backends/test_base.py b/t/unit/backends/test_base.py index 1a355d3c3ef..f2ede1503e2 100644 --- a/t/unit/backends/test_base.py +++ b/t/unit/backends/test_base.py @@ -176,6 +176,30 @@ def test_get_result_meta_with_none(self): assert meta['kwargs'] == kwargs assert meta['queue'] == 'celery' + def test_get_result_meta_format_date(self): + import datetime + self.app.conf.result_extended = True + b1 = BaseBackend(self.app) + args = ['a', 'b'] + kwargs = {'foo': 'bar'} + + request = Context(args=args, kwargs=kwargs) + meta = b1._get_result_meta(result={'fizz': 'buzz'}, + state=states.SUCCESS, traceback=None, + request=request, format_date=True) + assert isinstance(meta['date_done'], str) + + self.app.conf.result_extended = True + b2 = BaseBackend(self.app) + args = ['a', 'b'] + kwargs = {'foo': 'bar'} + + request = Context(args=args, kwargs=kwargs) + meta = b2._get_result_meta(result={'fizz': 'buzz'}, + state=states.SUCCESS, traceback=None, + request=request, format_date=False) + assert isinstance(meta['date_done'], datetime.datetime) + class test_BaseBackend_interface: From 1b57078a925843a117960f3561b8b210d65df9ad Mon Sep 17 00:00:00 2001 From: paradox-lab <57354735+paradox-lab@users.noreply.github.com> Date: Thu, 17 Aug 2023 16:45:20 +0800 Subject: [PATCH 0464/1051] rewrite out-of-date code (#8441) from 2.X syntax to 3.X syntax. --- docs/userguide/tasks.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/userguide/tasks.rst b/docs/userguide/tasks.rst index 6f9ceed528f..1fc99c39962 100644 --- a/docs/userguide/tasks.rst +++ b/docs/userguide/tasks.rst @@ -1672,12 +1672,12 @@ when calling ``apply_async``. return x + y # No result will be stored - result = mytask.apply_async(1, 2, ignore_result=True) - print result.get() # -> None + result = mytask.apply_async((1, 2), ignore_result=True) + print(result.get()) # -> None # Result will be stored - result = mytask.apply_async(1, 2, ignore_result=False) - print result.get() # -> 3 + result = mytask.apply_async((1, 2), ignore_result=False) + print(result.get()) # -> 3 By default tasks will *not ignore results* (``ignore_result=False``) when a result backend is configured. From 5a724ac66d0a18c24394005180696f4cf8ff7ed9 Mon Sep 17 00:00:00 2001 From: Omer Katz Date: Thu, 17 Aug 2023 20:32:58 +0300 Subject: [PATCH 0465/1051] Limit redis client to 4.x since 5.x fails the test suite. 
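
The fix described in the last commit above is a common Python pitfall worth spelling out: iterating a dict yields its *keys*, so code expecting the stored signature objects gets plain strings instead. A toy sketch with made-up names (not Celery's actual canvas internals):

    class Sig:
        """Stand-in for a task signature object."""
        def link_error(self, errback):
            return errback

    options = {"link": Sig(), "link_error": Sig()}

    for sig in options:             # BUG: iterates the keys, so sig is a str
        try:
            sig.link_error("errback")
        except AttributeError as exc:
            print(exc)              # 'str' object has no attribute 'link_error'

    for sig in options.values():    # FIX: iterate the stored objects instead
        sig.link_error("errback")
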
(#8442) --- requirements/extras/redis.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/redis.txt b/requirements/extras/redis.txt index 5bb61f15f33..ef3addb0bd9 100644 --- a/requirements/extras/redis.txt +++ b/requirements/extras/redis.txt @@ -1 +1 @@ -redis>=4.5.2,!=4.5.5 +redis>=4.5.2,<5.0.0,!=4.5.5 From 372a7a38c1dcf5f893e78ef034b864099fed35bb Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sat, 19 Aug 2023 15:58:30 +0300 Subject: [PATCH 0466/1051] Tox v4.9 has fixed a bug where unsupported environments did not raise an error but now they do. As our tox.ini only implement unit tests config for tox-gh-actions, since tox v4.9 our integration tests fail on GitHub. This change limits tox to v4.9 until we can fix it correctly as it breaks the testing environment for now --- .github/workflows/python-package.yml | 4 ++-- t/integration/test_canvas.py | 2 +- tox.ini | 1 + 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 00ee177e685..e32f5d71465 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -51,7 +51,7 @@ jobs: cache-dependency-path: '**/setup.py' - name: Install tox - run: python -m pip install --upgrade pip tox tox-gh-actions + run: python -m pip install --upgrade pip 'tox<4.9' tox-gh-actions - name: > Run tox for "${{ matrix.python-version }}-unit" @@ -107,7 +107,7 @@ jobs: cache: 'pip' cache-dependency-path: '**/setup.py' - name: Install tox - run: python -m pip install --upgrade pip tox tox-gh-actions + run: python -m pip install --upgrade pip 'tox<4.9' tox-gh-actions - name: > Run tox for "${{ matrix.python-version }}-integration-${{ matrix.toxenv }}" diff --git a/t/integration/test_canvas.py b/t/integration/test_canvas.py index 0c378f6785d..1d7370317f1 100644 --- a/t/integration/test_canvas.py +++ b/t/integration/test_canvas.py @@ -506,7 +506,7 @@ def test_chain_of_a_chord_and_three_tasks_and_a_group(self, manager): assert res.get(timeout=TIMEOUT) == [8, 8] @pytest.mark.xfail(raises=TimeoutError, reason="Task is timeout") - def test_nested_chain_group_lone(self, manager): + def test_nested_chain_group_lone(self, manager): # Fails with Redis 5.x """ Test that a lone group in a chain completes. 
""" diff --git a/tox.ini b/tox.ini index 59d3676d1e3..7993bfb80b8 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,6 @@ [tox] requires = + tox<4.9 tox-gh-actions envlist = {3.8,3.9,3.10,3.11,pypy3}-unit From dabccf03e8ce0d98b3d3e3da8f95fb8e3530083d Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 21 Aug 2023 18:05:04 +0300 Subject: [PATCH 0467/1051] =?UTF-8?q?Fixed=20issue:=20Flags=20broker=5Fcon?= =?UTF-8?q?nection=5Fretry=5Fon=5Fstartup=20&=20broker=5Fconnection=5Fretr?= =?UTF-8?q?y=20aren=E2=80=99t=20reliable=20#8433=20(#8446)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Fixed issue: Flags broker_connection_retry_on_startup & broker_connection_retry aren’t reliable #8433 * Enhanced unit test: test_ensure_connected --- celery/worker/consumer/consumer.py | 24 ++++++++++++++++++++---- t/unit/worker/test_consumer.py | 9 +++++---- 2 files changed, 25 insertions(+), 8 deletions(-) diff --git a/celery/worker/consumer/consumer.py b/celery/worker/consumer/consumer.py index c10c9aeb578..e072ef57870 100644 --- a/celery/worker/consumer/consumer.py +++ b/celery/worker/consumer/consumer.py @@ -153,6 +153,10 @@ class Consumer: restart_count = -1 # first start is the same as a restart + #: This flag will be turned off after the first failed + #: connection attempt. + first_connection_attempt = True + class Blueprint(bootsteps.Blueprint): """Consumer blueprint.""" @@ -337,7 +341,8 @@ def start(self): except recoverable_errors as exc: # If we're not retrying connections, we need to properly shutdown or terminate # the Celery main process instead of abruptly aborting the process without any cleanup. - is_connection_loss_on_startup = self.restart_count == 0 + is_connection_loss_on_startup = self.first_connection_attempt + self.first_connection_attempt = False connection_retry_type = self._get_connection_retry_type(is_connection_loss_on_startup) connection_retry = self.app.conf[connection_retry_type] if not connection_retry: @@ -488,13 +493,17 @@ def _error_handler(exc, interval, next_step=CONNECTION_RETRY_STEP): # Remember that the connection is lazy, it won't establish # until needed. - # If broker_connection_retry_on_startup is not set, revert to broker_connection_retry - # to determine whether connection retries are disabled. # TODO: Rely only on broker_connection_retry_on_startup to determine whether connection retries are disabled. # We will make the switch in Celery 6.0. + retry_disabled = False + if self.app.conf.broker_connection_retry_on_startup is None: + # If broker_connection_retry_on_startup is not set, revert to broker_connection_retry + # to determine whether connection retries are disabled. + retry_disabled = not self.app.conf.broker_connection_retry + warnings.warn( CPendingDeprecationWarning( f"The broker_connection_retry configuration setting will no longer determine\n" @@ -502,16 +511,23 @@ def _error_handler(exc, interval, next_step=CONNECTION_RETRY_STEP): f"If you wish to retain the existing behavior for retrying connections on startup,\n" f"you should set broker_connection_retry_on_startup to {self.app.conf.broker_connection_retry}.") ) + else: + if self.first_connection_attempt: + retry_disabled = not self.app.conf.broker_connection_retry_on_startup + else: + retry_disabled = not self.app.conf.broker_connection_retry - if not self.app.conf.broker_connection_retry and not self.app.conf.broker_connection_retry_on_startup: + if retry_disabled: # Retry disabled, just call connect directly. 
conn.connect() + self.first_connection_attempt = False return conn conn = conn.ensure_connection( _error_handler, self.app.conf.broker_connection_max_retries, callback=maybe_shutdown, ) + self.first_connection_attempt = False return conn def _flush_events(self): diff --git a/t/unit/worker/test_consumer.py b/t/unit/worker/test_consumer.py index eb872ab7a62..c7e80a0c7de 100644 --- a/t/unit/worker/test_consumer.py +++ b/t/unit/worker/test_consumer.py @@ -422,8 +422,11 @@ def test_cancel_long_running_tasks_on_connection_loss__warning(self): @pytest.mark.parametrize("broker_connection_retry", [True, False]) @pytest.mark.parametrize("broker_connection_retry_on_startup", [None, False]) - def test_ensure_connected(self, subtests, broker_connection_retry, broker_connection_retry_on_startup): + @pytest.mark.parametrize("first_connection_attempt", [True, False]) + def test_ensure_connected(self, subtests, broker_connection_retry, broker_connection_retry_on_startup, + first_connection_attempt): c = self.get_consumer() + c.first_connection_attempt = first_connection_attempt c.app.conf.broker_connection_retry_on_startup = broker_connection_retry_on_startup c.app.conf.broker_connection_retry = broker_connection_retry @@ -457,9 +460,7 @@ def test_start_raises_connection_error(self, is_connection_loss_on_startup, caplog, subtests): c = self.get_consumer() - # in order to reproduce the actual behavior: if this is the startup, then restart count has not been - # incremented yet, and is therefore -1. - c.restart_count = -1 if is_connection_loss_on_startup else 1 + c.first_connection_attempt = True if is_connection_loss_on_startup else False c.app.conf['broker_connection_retry'] = False c.app.conf['broker_connection_retry_on_startup'] = broker_connection_retry_on_startup c.blueprint.start.side_effect = ConnectionError() From 36c72ef096c815201930ca8400b1456d2ec01e7e Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 22 Aug 2023 00:17:36 +0300 Subject: [PATCH 0468/1051] [pre-commit.ci] pre-commit autoupdate (#8450) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/mirrors-mypy: v1.5.0 → v1.5.1](https://github.com/pre-commit/mirrors-mypy/compare/v1.5.0...v1.5.1) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 85418a758cb..8be176b4c41 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -30,7 +30,7 @@ repos: - id: isort - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.5.0 + rev: v1.5.1 hooks: - id: mypy pass_filenames: false From 8ae0b229596cc8aeea4fb71020d9358a59338e08 Mon Sep 17 00:00:00 2001 From: rainnnnny Date: Tue, 22 Aug 2023 17:41:15 +0800 Subject: [PATCH 0469/1051] doc update from #7651 --- docs/getting-started/backends-and-brokers/redis.rst | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/getting-started/backends-and-brokers/redis.rst b/docs/getting-started/backends-and-brokers/redis.rst index e7760762c8f..1924cb5dba2 100644 --- a/docs/getting-started/backends-and-brokers/redis.rst +++ b/docs/getting-started/backends-and-brokers/redis.rst @@ -164,12 +164,14 @@ a more distant future, database-backed periodic task might be a better choice. 
Periodic tasks won't be affected by the visibility timeout, as this is a concept separate from ETA/countdown. -You can increase this timeout by configuring a transport option +You can increase this timeout by configuring several options with the same name: .. code-block:: python app.conf.broker_transport_options = {'visibility_timeout': 43200} + app.conf.result_backend_transport_options = {'visibility_timeout': 43200} + app.conf.visibility_timeout = 43200 The value must be an int describing the number of seconds. From af1d7a18ec98b32e70cc17e3e17ee82d17efbd14 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 30 Aug 2023 18:55:14 +0300 Subject: [PATCH 0470/1051] Removed tox<4.9 limit (#8464) --- .github/workflows/python-package.yml | 4 ++-- tox.ini | 1 - 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index e32f5d71465..40af8568391 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -51,7 +51,7 @@ jobs: cache-dependency-path: '**/setup.py' - name: Install tox - run: python -m pip install --upgrade pip 'tox<4.9' tox-gh-actions + run: python -m pip install --upgrade pip 'tox' tox-gh-actions - name: > Run tox for "${{ matrix.python-version }}-unit" @@ -107,7 +107,7 @@ jobs: cache: 'pip' cache-dependency-path: '**/setup.py' - name: Install tox - run: python -m pip install --upgrade pip 'tox<4.9' tox-gh-actions + run: python -m pip install --upgrade pip 'tox' tox-gh-actions - name: > Run tox for "${{ matrix.python-version }}-integration-${{ matrix.toxenv }}" diff --git a/tox.ini b/tox.ini index 7993bfb80b8..59d3676d1e3 100644 --- a/tox.ini +++ b/tox.ini @@ -1,6 +1,5 @@ [tox] requires = - tox<4.9 tox-gh-actions envlist = {3.8,3.9,3.10,3.11,pypy3}-unit From 98f99e1421e456971010e043c4fd9226daa33d9b Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 30 Aug 2023 20:28:22 +0300 Subject: [PATCH 0471/1051] Fixed AttributeError: 'str' object has no attribute (#8463) * Fixed looping through tasks instead of going through the task's dict keys. Prevents errors like: AttributeError: 'str' object has no attribute 'link_error' * Added automatic tests * Fixed looping through link/link_error instead of going through dict keys.
Prevents errors like: AttributeError: 'str' object has no attribute '_app' * Added automatic tests --- celery/canvas.py | 10 +++--- t/integration/test_canvas.py | 61 ++++++++++++++++++++++++++++++ 2 files changed, 66 insertions(+), 5 deletions(-) diff --git a/celery/canvas.py b/celery/canvas.py index b0283657855..a4007f0a27f 100644 --- a/celery/canvas.py +++ b/celery/canvas.py @@ -653,7 +653,7 @@ def stamp_links(self, visitor, append_stamps=False, **headers): # Stamp all of the callbacks of this signature headers = deepcopy(non_visitor_headers) - for link in self.options.get('link', []) or []: + for link in maybe_list(self.options.get('link')) or []: link = maybe_signature(link, app=self.app) visitor_headers = None if visitor is not None: @@ -668,7 +668,7 @@ def stamp_links(self, visitor, append_stamps=False, **headers): # Stamp all of the errbacks of this signature headers = deepcopy(non_visitor_headers) - for link in self.options.get('link_error', []) or []: + for link in maybe_list(self.options.get('link_error')) or []: link = maybe_signature(link, app=self.app) visitor_headers = None if visitor is not None: @@ -1016,9 +1016,9 @@ def unchain_tasks(self): # Clone chain's tasks assigning signatures from link_error # to each task and adding the chain's links to the last task. tasks = [t.clone() for t in self.tasks] - for sig in self.options.get('link', []): + for sig in maybe_list(self.options.get('link')) or []: tasks[-1].link(sig) - for sig in self.options.get('link_error', []): + for sig in maybe_list(self.options.get('link_error')) or []: for task in tasks: task.link_error(sig) return tasks @@ -2272,7 +2272,7 @@ def link_error(self, errback): applied to the body. """ if self.app.conf.task_allow_error_cb_on_chord_header: - for task in self.tasks: + for task in maybe_list(self.tasks) or []: task.link_error(errback.clone(immutable=True)) else: # Once this warning is removed, the whole method needs to be refactored to: diff --git a/t/integration/test_canvas.py b/t/integration/test_canvas.py index 1d7370317f1..d758e97bd4a 100644 --- a/t/integration/test_canvas.py +++ b/t/integration/test_canvas.py @@ -2962,6 +2962,43 @@ def test_flag_allow_error_cb_on_chord_header_on_upgraded_chord(self, manager, su # Cleanup redis_connection.delete(errback_key) + def test_upgraded_chord_link_error_with_header_errback_enabled(self, manager, subtests): + try: + manager.app.backend.ensure_chords_allowed() + except NotImplementedError as e: + raise pytest.skip(e.args[0]) + + if not manager.app.conf.result_backend.startswith('redis'): + raise pytest.skip('Requires redis result backend.') + redis_connection = get_redis_connection() + + manager.app.conf.task_allow_error_cb_on_chord_header = True + + body_msg = 'chord body called' + body_key = 'echo_body' + body_sig = redis_echo.si(body_msg, redis_key=body_key) + + errback_msg = 'errback called' + errback_key = 'echo_errback' + errback_sig = redis_echo.si(errback_msg, redis_key=errback_key) + + redis_connection.delete(errback_key, body_key) + + sig = chain( + identity.si(42), + group( + fail.si(), + fail.si(), + ), + body_sig, + ).on_error(errback_sig) + + with subtests.test(msg='Error propagates from failure in header'): + with pytest.raises(ExpectedException): + sig.apply_async().get(timeout=TIMEOUT) + + redis_connection.delete(errback_key, body_key) + class test_signature_serialization: """ @@ -3441,3 +3478,27 @@ def on_signature(self, sig, **headers) -> dict: res = stamped_task.delay() res.get(timeout=TIMEOUT) assert assertion_result + + def 
test_stamp_canvas_with_dictionary_link(self, manager, subtests): + class CustomStampingVisitor(StampingVisitor): + def on_signature(self, sig, **headers) -> dict: + return {"on_signature": 42} + + with subtests.test("Stamp canvas with dictionary link"): + canvas = identity.si(42) + canvas.options["link"] = dict(identity.si(42)) + canvas.stamp(visitor=CustomStampingVisitor()) + + def test_stamp_canvas_with_dictionary_link_error(self, manager, subtests): + class CustomStampingVisitor(StampingVisitor): + def on_signature(self, sig, **headers) -> dict: + return {"on_signature": 42} + + with subtests.test("Stamp canvas with dictionary link error"): + canvas = fail.si() + canvas.options["link_error"] = dict(fail.si()) + canvas.stamp(visitor=CustomStampingVisitor()) + + with subtests.test(msg='Expect canvas to fail'): + with pytest.raises(ExpectedException): + canvas.apply_async().get(timeout=TIMEOUT) From 735a700ee716a08dcf3414316347c2963d90c32c Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 31 Aug 2023 13:26:44 +0300 Subject: [PATCH 0472/1051] Upgraded Kombu from 5.3.1 -> 5.3.2 (#8468) --- requirements/default.txt | 2 +- setup.cfg | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/default.txt b/requirements/default.txt index af4f8b35bca..7f24bdc0c06 100644 --- a/requirements/default.txt +++ b/requirements/default.txt @@ -1,5 +1,5 @@ billiard>=4.1.0,<5.0 -kombu>=5.3.1,<6.0 +kombu>=5.3.2,<6.0 vine>=5.0.0,<6.0 click>=8.1.2,<9.0 click-didyoumean>=0.3.0 diff --git a/setup.cfg b/setup.cfg index a1fc752e35a..a452ae09a64 100644 --- a/setup.cfg +++ b/setup.cfg @@ -35,7 +35,7 @@ per-file-ignores = requires = backports.zoneinfo>=0.2.1;python_version<'3.9' tzdata>=2022.7 billiard >=4.1.0,<5.0 - kombu >= 5.3.1,<6.0.0 + kombu >= 5.3.2,<6.0.0 [bdist_wheel] universal = 0 From c01172f47c392b253bb124e02934e7466a0202e6 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 31 Aug 2023 13:34:59 +0300 Subject: [PATCH 0473/1051] =?UTF-8?q?Bump=20version:=205.3.1=20=E2=86=92?= =?UTF-8?q?=205.3.2?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.cfg | 2 +- Changelog.rst | 44 +++++++++++++++++++++++++++++++--- README.rst | 6 ++--- celery/__init__.py | 2 +- docs/includes/introduction.txt | 2 +- 5 files changed, 47 insertions(+), 9 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index b44605ec017..a12ee098900 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 5.3.1 +current_version = 5.3.2 commit = True tag = True parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P[a-z\d]+)? diff --git a/Changelog.rst b/Changelog.rst index cea8615026c..3ffb0e8748c 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -8,6 +8,47 @@ This document contains change notes for bugfix & new features in the main branch & 5.3.x series, please see :ref:`whatsnew-5.3` for an overview of what's new in Celery 5.3. +.. 
_version-5.3.2: + +5.3.2 +===== + +:release-date: 2023-08-31 1:30 P.M GMT+2 +:release-by: Tomer Nosrati + +- Bugfix: Removed unecessary stamping code from _chord.run() (#8339) +- User guide fix (hotfix for #1755) (#8342) +- store children with database backend (#8338) +- Stamping bugfix with group/chord header errback linking (#8347) +- Use argsrepr and kwargsrepr in LOG_RECEIVED (#8301) +- Fixing minor typo in code example in calling.rst (#8366) +- add documents for timeout settings (#8373) +- fix: copyright year (#8380) +- setup.py: enable include_package_data (#8379) +- Fix eager tasks does not populate name field (#8383) +- Update test.txt dependencies (#8389) +- Update auth.txt deps (#8392) +- Fix backend.get_task_meta ignores the result_extended config parameter in mongodb backend (#8391) +- Support preload options for shell and purge commands (#8374) +- Implement safer ArangoDB queries (#8351) +- integration test: cleanup worker after test case (#8361) +- Added "Tomer Nosrati" to CONTRIBUTORS.txt (#8400) +- Update README.rst (#8404) +- Update README.rst (#8408) +- fix(canvas): add group index when unrolling tasks (#8427) +- fix(beat): debug statement should only log AsyncResult.id if it exists (#8428) +- Update auth.txt (#8435) +- Update mypy on test.txt (#8438) +- added missing kwargs arguments in some cli cmd (#8049) +- Fix #8431: Set format_date to False when calling _get_result_meta on mongo backend (#8432) +- Docs: rewrite out-of-date code (#8441) +- Limit redis client to 4.x since 5.x fails the test suite (#8442) +- Limit tox to < 4.9 (#8443) +- Fixed issue: Flags broker_connection_retry_on_startup & broker_connection_retry aren’t reliable #8433 (#8446) +- doc update from #7651 (#8451) +- Remove tox version limit (#8464) +- Fixed AttributeError: 'str' object has no attribute (#8463) +- Upgraded Kombu from 5.3.1 -> 5.3.2 (#8468) .. _version-5.3.1: @@ -28,8 +69,6 @@ an overview of what's new in Celery 5.3. - Fixed a small float value of retry_backoff (#8295). - Limit pyro4 up to python 3.10 only as it is (#8324). - - .. _version-5.3.0: 5.3.0 @@ -42,7 +81,6 @@ an overview of what's new in Celery 5.3. - Update librabbitmq.txt > 2.0.0 (#8292). - Upgrade syntax to py3.8 (#8281). - .. _version-5.3.0rc2: 5.3.0rc2 diff --git a/README.rst b/README.rst index d673c941ff9..b3e36beaabd 100644 --- a/README.rst +++ b/README.rst @@ -2,7 +2,7 @@ |build-status| |coverage| |license| |wheel| |semgrep| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge| -:Version: 5.3.1 (emerald-rush) +:Version: 5.3.2 (emerald-rush) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ @@ -58,7 +58,7 @@ in such a way that the client enqueues an URL to be requested by a worker. What do I need? 
=============== -Celery version 5.3.1 runs on: +Celery version 5.3.2 runs on: - Python (3.8, 3.9, 3.10, 3.11) - PyPy3.8+ (v7.3.11+) @@ -92,7 +92,7 @@ Get Started =========== If this is the first time you're trying to use Celery, or you're -new to Celery v5.3.1 coming from previous versions then you should read our +new to Celery v5.3.2 coming from previous versions then you should read our getting started tutorials: - `First steps with Celery`_ diff --git a/celery/__init__.py b/celery/__init__.py index 1bff85d8bdf..294861cf9ca 100644 --- a/celery/__init__.py +++ b/celery/__init__.py @@ -17,7 +17,7 @@ SERIES = 'emerald-rush' -__version__ = '5.3.1' +__version__ = '5.3.2' __author__ = 'Ask Solem' __contact__ = 'auvipy@gmail.com' __homepage__ = 'https://docs.celeryq.dev/' diff --git a/docs/includes/introduction.txt b/docs/includes/introduction.txt index 509854d4595..e4ff71c76bc 100644 --- a/docs/includes/introduction.txt +++ b/docs/includes/introduction.txt @@ -1,4 +1,4 @@ -:Version: 5.3.1 (emerald-rush) +:Version: 5.3.2 (emerald-rush) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ From 9aa07ede98ab0373583c3cb5ec0413a4569d4e31 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 31 Aug 2023 13:50:18 +0300 Subject: [PATCH 0474/1051] =?UTF-8?q?Bump=20version:=205.3.2=20=E2=86=92?= =?UTF-8?q?=205.3.3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.cfg | 2 +- Changelog.rst | 99 ++++++++++++++++++++++------------ README.rst | 6 +-- celery/__init__.py | 2 +- docs/includes/introduction.txt | 2 +- 5 files changed, 71 insertions(+), 40 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index a12ee098900..4f74ddd02fd 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 5.3.2 +current_version = 5.3.3 commit = True tag = True parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P[a-z\d]+)? diff --git a/Changelog.rst b/Changelog.rst index 3ffb0e8748c..17de7809913 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -8,47 +8,78 @@ This document contains change notes for bugfix & new features in the main branch & 5.3.x series, please see :ref:`whatsnew-5.3` for an overview of what's new in Celery 5.3. +.. _version-5.3.3: + +v5.3.3 +===== + +:release-date: 2023-08-31 1:47 P.M GMT+2 +:release-by: Tomer Nosrati + +* Fixed changelog for 5.3.2 release docs. + .. 
_version-5.3.2: -5.3.2 +v5.3.2 ===== :release-date: 2023-08-31 1:30 P.M GMT+2 :release-by: Tomer Nosrati -- Bugfix: Removed unecessary stamping code from _chord.run() (#8339) -- User guide fix (hotfix for #1755) (#8342) -- store children with database backend (#8338) -- Stamping bugfix with group/chord header errback linking (#8347) -- Use argsrepr and kwargsrepr in LOG_RECEIVED (#8301) -- Fixing minor typo in code example in calling.rst (#8366) -- add documents for timeout settings (#8373) -- fix: copyright year (#8380) -- setup.py: enable include_package_data (#8379) -- Fix eager tasks does not populate name field (#8383) -- Update test.txt dependencies (#8389) -- Update auth.txt deps (#8392) -- Fix backend.get_task_meta ignores the result_extended config parameter in mongodb backend (#8391) -- Support preload options for shell and purge commands (#8374) -- Implement safer ArangoDB queries (#8351) -- integration test: cleanup worker after test case (#8361) -- Added "Tomer Nosrati" to CONTRIBUTORS.txt (#8400) -- Update README.rst (#8404) -- Update README.rst (#8408) -- fix(canvas): add group index when unrolling tasks (#8427) -- fix(beat): debug statement should only log AsyncResult.id if it exists (#8428) -- Update auth.txt (#8435) -- Update mypy on test.txt (#8438) -- added missing kwargs arguments in some cli cmd (#8049) -- Fix #8431: Set format_date to False when calling _get_result_meta on mongo backend (#8432) -- Docs: rewrite out-of-date code (#8441) -- Limit redis client to 4.x since 5.x fails the test suite (#8442) -- Limit tox to < 4.9 (#8443) -- Fixed issue: Flags broker_connection_retry_on_startup & broker_connection_retry aren’t reliable #8433 (#8446) -- doc update from #7651 (#8451) -- Remove tox version limit (#8464) -- Fixed AttributeError: 'str' object has no attribute (#8463) -- Upgraded Kombu from 5.3.1 -> 5.3.2 (#8468) +## What's Changed +* Bugfix: Removed unecessary stamping code from _chord.run() by @Nusnus in https://github.com/celery/celery/pull/8339 +* User guide fix (hotfix for #1755) by @Nusnus in https://github.com/celery/celery/pull/8342 +* store children with database backend by @aaronst in https://github.com/celery/celery/pull/8338 +* Stamping bugfix with group/chord header errback linking by @Nusnus in https://github.com/celery/celery/pull/8347 +* Use argsrepr and kwargsrepr in LOG_RECEIVED by @zhu in https://github.com/celery/celery/pull/8301 +* Fixing minor typo in code example in calling.rst by @dwysocki in https://github.com/celery/celery/pull/8366 +* add documents for timeout settings by @ooyamatakehisa in https://github.com/celery/celery/pull/8373 +* fix: copyright year by @karanganesan in https://github.com/celery/celery/pull/8380 +* setup.py: enable include_package_data by @elohmeier in https://github.com/celery/celery/pull/8379 +* Fix eager tasks does not populate name field by @KOliver94 in https://github.com/celery/celery/pull/8383 +* Update test.txt dependencies by @auvipy in https://github.com/celery/celery/pull/8389 +* Update auth.txt deps by @auvipy in https://github.com/celery/celery/pull/8392 +* Fix backend.get_task_meta ignores the result_extended config parameter in mongodb backend by @ycc140 in https://github.com/celery/celery/pull/8391 +* Support preload options for shell and purge commands by @dpdoughe in https://github.com/celery/celery/pull/8374 +* Implement safer ArangoDB queries by @othieno in https://github.com/celery/celery/pull/8351 +* integration test: cleanup worker after test case by @zhu in 
https://github.com/celery/celery/pull/8361 +* Added "Tomer Nosrati" to CONTRIBUTORS.txt by @Nusnus in https://github.com/celery/celery/pull/8400 +* Update README.rst by @sourabhligade in https://github.com/celery/celery/pull/8404 +* Update README.rst by @sourabhligade in https://github.com/celery/celery/pull/8408 +* fix(canvas): add group index when unrolling tasks by @mkniewallner in https://github.com/celery/celery/pull/8427 +* fix(beat): debug statement should only log AsyncResult.id if it exists by @bmrobin in https://github.com/celery/celery/pull/8428 +* Lint fixes & pre-commit autoupdate by @Nusnus in https://github.com/celery/celery/pull/8414 +* Update auth.txt by @auvipy in https://github.com/celery/celery/pull/8435 +* Update mypy on test.txt by @auvipy in https://github.com/celery/celery/pull/8438 +* added missing kwargs arguments in some cli cmd by @auvipy in https://github.com/celery/celery/pull/8049 +* Fix #8431: Set format_date to False when calling _get_result_meta on mongo backend by @asukero in https://github.com/celery/celery/pull/8432 +* Docs: rewrite out-of-date code by @paradox-lab in https://github.com/celery/celery/pull/8441 +* Limit redis client to 4.x since 5.x fails the test suite by @thedrow in https://github.com/celery/celery/pull/8442 +* Limit tox to < 4.9 by @Nusnus in https://github.com/celery/celery/pull/8443 +* Fixed issue: Flags broker_connection_retry_on_startup & broker_connection_retry aren’t reliable #8433 by @Nusnus in https://github.com/celery/celery/pull/8446 +* doc update from #7651 by @rainnnnny in https://github.com/celery/celery/pull/8451 +* Remove tox version limit by @Nusnus in https://github.com/celery/celery/pull/8464 +* Fixed AttributeError: 'str' object has no attribute by @Nusnus in https://github.com/celery/celery/pull/8463 +* Upgraded Kombu from 5.3.1 -> 5.3.2 by @Nusnus in https://github.com/celery/celery/pull/8468 + +## New Contributors +* @aaronst made their first contribution in https://github.com/celery/celery/pull/8338 +* @zhu made their first contribution in https://github.com/celery/celery/pull/8301 +* @dwysocki made their first contribution in https://github.com/celery/celery/pull/8366 +* @ooyamatakehisa made their first contribution in https://github.com/celery/celery/pull/8373 +* @karanganesan made their first contribution in https://github.com/celery/celery/pull/8380 +* @elohmeier made their first contribution in https://github.com/celery/celery/pull/8379 +* @KOliver94 made their first contribution in https://github.com/celery/celery/pull/8383 +* @ycc140 made their first contribution in https://github.com/celery/celery/pull/8391 +* @dpdoughe made their first contribution in https://github.com/celery/celery/pull/8374 +* @othieno made their first contribution in https://github.com/celery/celery/pull/8351 +* @sourabhligade made their first contribution in https://github.com/celery/celery/pull/8404 +* @mkniewallner made their first contribution in https://github.com/celery/celery/pull/8427 +* @bmrobin made their first contribution in https://github.com/celery/celery/pull/8428 +* @asukero made their first contribution in https://github.com/celery/celery/pull/8432 +* @rainnnnny made their first contribution in https://github.com/celery/celery/pull/8451 + +**Full Changelog**: https://github.com/celery/celery/compare/v5.3.1...v5.3.2 .. 
_version-5.3.1: diff --git a/README.rst b/README.rst index b3e36beaabd..7b3211d3340 100644 --- a/README.rst +++ b/README.rst @@ -2,7 +2,7 @@ |build-status| |coverage| |license| |wheel| |semgrep| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge| -:Version: 5.3.2 (emerald-rush) +:Version: 5.3.3 (emerald-rush) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ @@ -58,7 +58,7 @@ in such a way that the client enqueues an URL to be requested by a worker. What do I need? =============== -Celery version 5.3.2 runs on: +Celery version 5.3.3 runs on: - Python (3.8, 3.9, 3.10, 3.11) - PyPy3.8+ (v7.3.11+) @@ -92,7 +92,7 @@ Get Started =========== If this is the first time you're trying to use Celery, or you're -new to Celery v5.3.2 coming from previous versions then you should read our +new to Celery v5.3.3 coming from previous versions then you should read our getting started tutorials: - `First steps with Celery`_ diff --git a/celery/__init__.py b/celery/__init__.py index 294861cf9ca..6c215c3561d 100644 --- a/celery/__init__.py +++ b/celery/__init__.py @@ -17,7 +17,7 @@ SERIES = 'emerald-rush' -__version__ = '5.3.2' +__version__ = '5.3.3' __author__ = 'Ask Solem' __contact__ = 'auvipy@gmail.com' __homepage__ = 'https://docs.celeryq.dev/' diff --git a/docs/includes/introduction.txt b/docs/includes/introduction.txt index e4ff71c76bc..31db9416847 100644 --- a/docs/includes/introduction.txt +++ b/docs/includes/introduction.txt @@ -1,4 +1,4 @@ -:Version: 5.3.2 (emerald-rush) +:Version: 5.3.3 (emerald-rush) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ From b28ac9ccef9612802983e921572f4c29ef6151b8 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Thu, 31 Aug 2023 15:36:43 -0300 Subject: [PATCH 0475/1051] Document need for CELERY_ prefix on CLI env vars --- docs/reference/cli.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/reference/cli.rst b/docs/reference/cli.rst index 6432b7e300a..c1ee1084985 100644 --- a/docs/reference/cli.rst +++ b/docs/reference/cli.rst @@ -2,6 +2,9 @@ Command Line Interface ======================= +.. NOTE:: The prefix `CELERY_` must be added to the names of the environment + variables described below. E.g., `APP` becomes `CELERY_APP`. + .. 
click:: celery.bin.celery:celery :prog: celery :nested: full From 1aff856ea37a477639ef2c8883f121d6670f72e0 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Sat, 2 Sep 2023 07:30:47 -0300 Subject: [PATCH 0476/1051] Use string value for CELERY_SKIP_CHECKS envvar (#8462) * Use string value for CELERY_SKIP_CHECKS envvar * Document the SKIP_CHECKS env var * Remove lint * Test effect of skip-checks option * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * Isolate os.environ in test_cli_skip_checks --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- celery/bin/celery.py | 5 +++-- t/unit/bin/test_worker.py | 15 +++++++++++++++ t/unit/fixups/test_django.py | 2 +- 3 files changed, 19 insertions(+), 3 deletions(-) diff --git a/celery/bin/celery.py b/celery/bin/celery.py index 15558813b0b..4aeed42597f 100644 --- a/celery/bin/celery.py +++ b/celery/bin/celery.py @@ -136,7 +136,8 @@ def convert(self, value, param, ctx): cls=CeleryOption, is_flag=True, help_group="Global Options", - help="Skip Django core checks on startup.") + help="Skip Django core checks on startup. Setting the SKIP_CHECKS environment " + "variable to any non-empty string will have the same effect.") @click.pass_context def celery(ctx, app, broker, result_backend, loader, config, workdir, no_color, quiet, version, skip_checks): @@ -158,7 +159,7 @@ def celery(ctx, app, broker, result_backend, loader, config, workdir, if config: os.environ['CELERY_CONFIG_MODULE'] = config if skip_checks: - os.environ['CELERY_SKIP_CHECKS'] = skip_checks + os.environ['CELERY_SKIP_CHECKS'] = 'true' ctx.obj = CLIContext(app=app, no_color=no_color, workdir=workdir, quiet=quiet) diff --git a/t/unit/bin/test_worker.py b/t/unit/bin/test_worker.py index 50a07e3b674..b63a2a03306 100644 --- a/t/unit/bin/test_worker.py +++ b/t/unit/bin/test_worker.py @@ -1,3 +1,6 @@ +import os +from unittest.mock import patch + import pytest from click.testing import CliRunner @@ -18,3 +21,15 @@ def test_cli(isolated_cli_runner: CliRunner): catch_exceptions=False ) assert res.exit_code == 1, (res, res.stdout) + + +def test_cli_skip_checks(isolated_cli_runner: CliRunner): + Logging._setup = True # To avoid hitting the logging sanity checks + with patch.dict(os.environ, clear=True): + res = isolated_cli_runner.invoke( + celery, + ["-A", "t.unit.bin.proj.app", "--skip-checks", "worker", "--pool", "solo"], + catch_exceptions=False, + ) + assert res.exit_code == 1, (res, res.stdout) + assert os.environ["CELERY_SKIP_CHECKS"] == "true", "should set CELERY_SKIP_CHECKS" diff --git a/t/unit/fixups/test_django.py b/t/unit/fixups/test_django.py index 07f94c6b813..8a97884ed4a 100644 --- a/t/unit/fixups/test_django.py +++ b/t/unit/fixups/test_django.py @@ -272,7 +272,7 @@ def test_validate_models(self, patching, module): f.django_setup.reset_mock() run_checks.reset_mock() - patching.setenv('CELERY_SKIP_CHECKS', True) + patching.setenv('CELERY_SKIP_CHECKS', 'true') f.validate_models() f.django_setup.assert_called_with() run_checks.assert_not_called() From 74c8bf7f2f7c310fb23858b9b6aebce787d50353 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Sat, 2 Sep 2023 17:31:54 +0600 Subject: [PATCH 0477/1051] Revert "fix(backends.database): store children" (#8475) This reverts commit 51b28461d0d8b2fdf7db8a7cd2368ba11222bb6d. 
--- celery/backends/database/models.py | 2 -- t/unit/backends/test_database.py | 1 - 2 files changed, 3 deletions(-) diff --git a/celery/backends/database/models.py b/celery/backends/database/models.py index f2a56965ccf..1c766b51ca4 100644 --- a/celery/backends/database/models.py +++ b/celery/backends/database/models.py @@ -25,7 +25,6 @@ class Task(ResultModelBase): date_done = sa.Column(sa.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=True) traceback = sa.Column(sa.Text, nullable=True) - children = sa.Column(PickleType, nullable=True) def __init__(self, task_id): self.task_id = task_id @@ -37,7 +36,6 @@ def to_dict(self): 'result': self.result, 'traceback': self.traceback, 'date_done': self.date_done, - 'children': self.children, } def __repr__(self): diff --git a/t/unit/backends/test_database.py b/t/unit/backends/test_database.py index a5d11b18c65..a693f383f67 100644 --- a/t/unit/backends/test_database.py +++ b/t/unit/backends/test_database.py @@ -99,7 +99,6 @@ def test_missing_task_meta_is_dict_with_pending(self): assert meta['task_id'] == 'xxx-does-not-exist-at-all' assert meta['result'] is None assert meta['traceback'] is None - assert meta['children'] is None def test_mark_as_done(self): tb = DatabaseBackend(self.uri, app=self.app) From c08e811b383f72157b98e21e178c5c42762d671d Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Sat, 2 Sep 2023 17:50:49 +0600 Subject: [PATCH 0478/1051] Revert "Fix eager tasks does not populate name field (#8383)" (#8476) This reverts commit 1c363876147325a196c474e757e355c451a0cdff. --- celery/app/task.py | 3 +-- celery/result.py | 4 +--- t/unit/tasks/test_chord.py | 2 +- t/unit/tasks/test_result.py | 16 ++++++++-------- 4 files changed, 11 insertions(+), 14 deletions(-) diff --git a/celery/app/task.py b/celery/app/task.py index de290ae6035..7998d600b76 100644 --- a/celery/app/task.py +++ b/celery/app/task.py @@ -788,7 +788,6 @@ def apply(self, args=None, kwargs=None, request = { 'id': task_id, - 'task': self.name, 'retries': retries, 'is_eager': True, 'logfile': logfile, @@ -825,7 +824,7 @@ def apply(self, args=None, kwargs=None, if isinstance(retval, Retry) and retval.sig is not None: return retval.sig.apply(retries=retries + 1) state = states.SUCCESS if ret.info is None else ret.info.state - return EagerResult(task_id, self.name, retval, state, traceback=tb) + return EagerResult(task_id, retval, state, traceback=tb) def AsyncResult(self, task_id, **kwargs): """Get AsyncResult instance for the specified task. 
diff --git a/celery/result.py b/celery/result.py index 4c12e3edde7..0c9e0a30f21 100644 --- a/celery/result.py +++ b/celery/result.py @@ -983,11 +983,10 @@ def restore(cls, id, backend=None, app=None): class EagerResult(AsyncResult): """Result that we know has already been executed.""" - def __init__(self, id, name, ret_value, state, traceback=None): + def __init__(self, id, ret_value, state, traceback=None): # pylint: disable=super-init-not-called # XXX should really not be inheriting from AsyncResult self.id = id - self._name = name self._result = ret_value self._state = state self._traceback = traceback @@ -1039,7 +1038,6 @@ def __repr__(self): @property def _cache(self): return { - 'name': self._name, 'task_id': self.id, 'result': self._result, 'status': self._state, diff --git a/t/unit/tasks/test_chord.py b/t/unit/tasks/test_chord.py index acd5344d7cb..e44c0af4b67 100644 --- a/t/unit/tasks/test_chord.py +++ b/t/unit/tasks/test_chord.py @@ -46,7 +46,7 @@ def join(self, propagate=True, **kwargs): def _failed_join_report(self): for value in self.value: if isinstance(value, Exception): - yield EagerResult('some_id', 'test-task', value, 'FAILURE') + yield EagerResult('some_id', value, 'FAILURE') class TSRNoReport(TSR): diff --git a/t/unit/tasks/test_result.py b/t/unit/tasks/test_result.py index 814db338f85..42eaab8987d 100644 --- a/t/unit/tasks/test_result.py +++ b/t/unit/tasks/test_result.py @@ -136,7 +136,7 @@ def test_reduce_direct(self): def test_children(self): x = self.app.AsyncResult('1') - children = [EagerResult(str(i), 'test-task', i, states.SUCCESS) for i in range(3)] + children = [EagerResult(str(i), i, states.SUCCESS) for i in range(3)] x._cache = {'children': children, 'status': states.SUCCESS} x.backend = Mock() assert x.children @@ -147,12 +147,12 @@ def test_propagates_for_parent(self): x.backend = Mock(name='backend') x.backend.get_task_meta.return_value = {} x.backend.wait_for_pending.return_value = 84 - x.parent = EagerResult(uuid(), 'test-task', KeyError('foo'), states.FAILURE) + x.parent = EagerResult(uuid(), KeyError('foo'), states.FAILURE) with pytest.raises(KeyError): x.get(propagate=True) x.backend.wait_for_pending.assert_not_called() - x.parent = EagerResult(uuid(), 'test-task', 42, states.SUCCESS) + x.parent = EagerResult(uuid(), 42, states.SUCCESS) assert x.get(propagate=True) == 84 x.backend.wait_for_pending.assert_called() @@ -172,7 +172,7 @@ def test_get_children(self): def test_build_graph_get_leaf_collect(self): x = self.app.AsyncResult('1') x.backend._cache['1'] = {'status': states.SUCCESS, 'result': None} - c = [EagerResult(str(i), 'test-task', i, states.SUCCESS) for i in range(3)] + c = [EagerResult(str(i), i, states.SUCCESS) for i in range(3)] x.iterdeps = Mock() x.iterdeps.return_value = ( (None, x), @@ -194,7 +194,7 @@ def test_build_graph_get_leaf_collect(self): def test_iterdeps(self): x = self.app.AsyncResult('1') - c = [EagerResult(str(i), 'test-task', i, states.SUCCESS) for i in range(3)] + c = [EagerResult(str(i), i, states.SUCCESS) for i in range(3)] x._cache = {'status': states.SUCCESS, 'result': None, 'children': c} for child in c: child.backend = Mock() @@ -945,13 +945,13 @@ def test_wait_raises(self): assert res.wait(propagate=False) def test_wait(self): - res = EagerResult('x', 'test-task', 'x', states.RETRY) + res = EagerResult('x', 'x', states.RETRY) res.wait() assert res.state == states.RETRY assert res.status == states.RETRY def test_forget(self): - res = EagerResult('x', 'test-task', 'x', states.RETRY) + res = EagerResult('x', 'x', 
states.RETRY) res.forget() def test_revoke(self): @@ -962,7 +962,7 @@ def test_get_sync_subtask_option(self, task_join_will_block): task_join_will_block.return_value = True tid = uuid() - res_subtask_async = EagerResult(tid, 'test-task', 'x', 'x', states.SUCCESS) + res_subtask_async = EagerResult(tid, 'x', 'x', states.SUCCESS) with pytest.raises(RuntimeError): res_subtask_async.get() res_subtask_async.get(disable_sync_subtasks=False) From 88d641c0fb0238f8e1fc68845b2693333c4e3035 Mon Sep 17 00:00:00 2001 From: Mike Lissner Date: Fri, 1 Sep 2023 21:02:00 -0700 Subject: [PATCH 0479/1051] Update Changelog.rst --- Changelog.rst | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/Changelog.rst b/Changelog.rst index 17de7809913..185cfba8f41 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -26,7 +26,8 @@ v5.3.2 :release-date: 2023-08-31 1:30 P.M GMT+2 :release-by: Tomer Nosrati -## What's Changed +What's Changed +-------------- * Bugfix: Removed unecessary stamping code from _chord.run() by @Nusnus in https://github.com/celery/celery/pull/8339 * User guide fix (hotfix for #1755) by @Nusnus in https://github.com/celery/celery/pull/8342 * store children with database backend by @aaronst in https://github.com/celery/celery/pull/8338 @@ -62,7 +63,8 @@ v5.3.2 * Fixed AttributeError: 'str' object has no attribute by @Nusnus in https://github.com/celery/celery/pull/8463 * Upgraded Kombu from 5.3.1 -> 5.3.2 by @Nusnus in https://github.com/celery/celery/pull/8468 -## New Contributors +New Contributors +---------------- * @aaronst made their first contribution in https://github.com/celery/celery/pull/8338 * @zhu made their first contribution in https://github.com/celery/celery/pull/8301 * @dwysocki made their first contribution in https://github.com/celery/celery/pull/8366 From d83d0629b362993e1b14dae8eb68997303718f65 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Sun, 3 Sep 2023 11:53:13 +0600 Subject: [PATCH 0480/1051] Remove as it seems to be buggy. (#8340) After pushing a tag, we can now use GitHub's release notes generator, so it is better to remove this workflow now. When it was added, GitHub didn't have a release notes generator. 
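As a rough sketch of the replacement flow this refers to, release notes can be generated straight from a pushed tag with the GitHub CLI (assuming `gh` is installed; the tag name below is only a placeholder):

    # Push the tag, then have GitHub generate the release notes for it.
    git push origin vX.Y.Z
    gh release create vX.Y.Z --generate-notes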
--- .github/workflows/changerelease.yml | 32 ----------------------------- 1 file changed, 32 deletions(-) delete mode 100644 .github/workflows/changerelease.yml diff --git a/.github/workflows/changerelease.yml b/.github/workflows/changerelease.yml deleted file mode 100644 index 91f9e7e1e5c..00000000000 --- a/.github/workflows/changerelease.yml +++ /dev/null @@ -1,32 +0,0 @@ -name: changerelease -on: - workflow_dispatch: {} - push: - paths: [Changelog.rst] - branches: [main] - tags: ["*"] - -permissions: - contents: write - -jobs: - sync: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: docker://pandoc/core:2.14 - with: - args: "Changelog.rst -f rst -t markdown -o CR_CHANGELOG.md" - - name: "Clean up markdown" - run: | - # https://stackoverflow.com/a/1252191/1110798 - cat CR_CHANGELOG.md - sed -i -e ':a' -e 'N' -e '$!ba' -e 's/release-date\n\n: /Release date: /g' CR_CHANGELOG.md - sed -i -e ':a' -e 'N' -e '$!ba' -e 's/release-by\n\n: /Release by: /g' CR_CHANGELOG.md - cat CR_CHANGELOG.md - - uses: dropseed/changerelease@v1 - with: - github_token: ${{ secrets.GITHUB_TOKEN }} - changelog: CR_CHANGELOG.md - remote_changelog: false - limit: -1 From a683b3624ef9a711593d26a45e5d004733001a7e Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Sun, 3 Sep 2023 12:02:01 +0600 Subject: [PATCH 0481/1051] Revert "Add Semgrep CI (#8201)" (#8477) This reverts commit f28047ac05f2445acf0626419bfa53b0df089f38. --- .github/workflows/semgrep.yml | 23 ----------------------- 1 file changed, 23 deletions(-) delete mode 100644 .github/workflows/semgrep.yml diff --git a/.github/workflows/semgrep.yml b/.github/workflows/semgrep.yml deleted file mode 100644 index 88d6d45d5a4..00000000000 --- a/.github/workflows/semgrep.yml +++ /dev/null @@ -1,23 +0,0 @@ -on: - pull_request: {} - push: - branches: - - main - - master - paths: - - .github/workflows/semgrep.yml - schedule: - # random HH:MM to avoid a load spike on GitHub Actions at 00:00 - - cron: 44 6 * * * -name: Semgrep -jobs: - semgrep: - name: Scan - runs-on: ubuntu-20.04 - env: - SEMGREP_APP_TOKEN: ${{ secrets.SEMGREP_APP_TOKEN }} - container: - image: returntocorp/semgrep - steps: - - uses: actions/checkout@v3 - - run: semgrep ci From 6deda86b564f23fdde2586c95d2a62e86549b0aa Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Sun, 3 Sep 2023 12:56:54 +0600 Subject: [PATCH 0482/1051] Revert "Revert "Add Semgrep CI (#8201)" (#8477)" (#8478) This reverts commit a683b3624ef9a711593d26a45e5d004733001a7e. 
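For reference, a minimal sketch of reproducing the restored Semgrep scan locally, using the same container image the workflow runs (the token value is a placeholder):

    # Mirror the CI job: mount the repository at /src and run the same scan.
    docker run --rm -v "$PWD:/src" -e SEMGREP_APP_TOKEN="<token>" returntocorp/semgrep semgrep ci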
--- .github/workflows/semgrep.yml | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 .github/workflows/semgrep.yml diff --git a/.github/workflows/semgrep.yml b/.github/workflows/semgrep.yml new file mode 100644 index 00000000000..88d6d45d5a4 --- /dev/null +++ b/.github/workflows/semgrep.yml @@ -0,0 +1,23 @@ +on: + pull_request: {} + push: + branches: + - main + - master + paths: + - .github/workflows/semgrep.yml + schedule: + # random HH:MM to avoid a load spike on GitHub Actions at 00:00 + - cron: 44 6 * * * +name: Semgrep +jobs: + semgrep: + name: Scan + runs-on: ubuntu-20.04 + env: + SEMGREP_APP_TOKEN: ${{ secrets.SEMGREP_APP_TOKEN }} + container: + image: returntocorp/semgrep + steps: + - uses: actions/checkout@v3 + - run: semgrep ci From 3d40bea92221e249562ba33642bfbe3f06c0f644 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sun, 3 Sep 2023 22:40:34 +0300 Subject: [PATCH 0483/1051] Prepare Celery v5.3.4 Release (#8479) * Added yanked label + warning + explanation to v5.3.2 and v5.3.3 in Changelog.rst and fixed syntax to avoid Markdown * Added Changelog for v5.3.4 --- Changelog.rst | 169 ++++++++++++++++++++++++++++++++------------------ 1 file changed, 108 insertions(+), 61 deletions(-) diff --git a/Changelog.rst b/Changelog.rst index 185cfba8f41..1438bb21b1c 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -8,80 +8,127 @@ This document contains change notes for bugfix & new features in the main branch & 5.3.x series, please see :ref:`whatsnew-5.3` for an overview of what's new in Celery 5.3. -.. _version-5.3.3: +.. _version-5.3.4: -v5.3.3 +5.3.4 ===== +:release-date: 2023-09-03 10:10 P.M GMT+2 +:release-by: Tomer Nosrati + +.. warning:: + This version has reverted the breaking changes introduced in 5.3.2 and 5.3.3: + + - Revert "store children with database backend" (#8475) + - Revert "Fix eager tasks does not populate name field" (#8476) + +- Bugfix: Removed unecessary stamping code from _chord.run() (#8339) +- User guide fix (hotfix for #1755) (#8342) +- store children with database backend (#8338) +- Stamping bugfix with group/chord header errback linking (#8347) +- Use argsrepr and kwargsrepr in LOG_RECEIVED (#8301) +- Fixing minor typo in code example in calling.rst (#8366) +- add documents for timeout settings (#8373) +- fix: copyright year (#8380) +- setup.py: enable include_package_data (#8379) +- Fix eager tasks does not populate name field (#8383) +- Update test.txt dependencies (#8389) +- Update auth.txt deps (#8392) +- Fix backend.get_task_meta ignores the result_extended config parameter in mongodb backend (#8391) +- Support preload options for shell and purge commands (#8374) +- Implement safer ArangoDB queries (#8351) +- integration test: cleanup worker after test case (#8361) +- Added "Tomer Nosrati" to CONTRIBUTORS.txt (#8400) +- Update README.rst (#8404) +- Update README.rst (#8408) +- fix(canvas): add group index when unrolling tasks (#8427) +- fix(beat): debug statement should only log AsyncResult.id if it exists (#8428) +- Lint fixes & pre-commit autoupdate (#8414) +- Update auth.txt (#8435) +- Update mypy on test.txt (#8438) +- added missing kwargs arguments in some cli cmd (#8049) +- Fix #8431: Set format_date to False when calling _get_result_meta on mongo backend (#8432) +- Docs: rewrite out-of-date code (#8441) +- Limit redis client to 4.x since 5.x fails the test suite (#8442) +- Limit tox to < 4.9 (#8443) +- Fixed issue: Flags broker_connection_retry_on_startup & broker_connection_retry aren’t reliable (#8446) +- doc 
update from #7651 (#8451) +- Remove tox version limit (#8464) +- Fixed AttributeError: 'str' object has no attribute (#8463) +- Upgraded Kombu from 5.3.1 -> 5.3.2 (#8468) +- Document need for CELERY_ prefix on CLI env vars (#8469) +- Use string value for CELERY_SKIP_CHECKS envvar (#8462) +- Revert "store children with database backend" (#8475) +- Revert "Fix eager tasks does not populate name field" (#8476) +- Update Changelog (#8474) +- Remove as it seems to be buggy. (#8340) +- Revert "Add Semgrep to CI" (#8477) +- Revert "Revert "Add Semgrep to CI"" (#8478) + +.. _version-5.3.3: + +5.3.3 (Yanked) +============== + :release-date: 2023-08-31 1:47 P.M GMT+2 :release-by: Tomer Nosrati -* Fixed changelog for 5.3.2 release docs. +.. warning:: + This version has been yanked due to breaking API changes. The breaking changes include: + + - Store children with database backend (#8338) + - Fix eager tasks does not populate name field (#8383) + +- Fixed changelog for 5.3.2 release docs. .. _version-5.3.2: -v5.3.2 -===== +5.3.2 (Yanked) +============== :release-date: 2023-08-31 1:30 P.M GMT+2 :release-by: Tomer Nosrati -What's Changed --------------- -* Bugfix: Removed unecessary stamping code from _chord.run() by @Nusnus in https://github.com/celery/celery/pull/8339 -* User guide fix (hotfix for #1755) by @Nusnus in https://github.com/celery/celery/pull/8342 -* store children with database backend by @aaronst in https://github.com/celery/celery/pull/8338 -* Stamping bugfix with group/chord header errback linking by @Nusnus in https://github.com/celery/celery/pull/8347 -* Use argsrepr and kwargsrepr in LOG_RECEIVED by @zhu in https://github.com/celery/celery/pull/8301 -* Fixing minor typo in code example in calling.rst by @dwysocki in https://github.com/celery/celery/pull/8366 -* add documents for timeout settings by @ooyamatakehisa in https://github.com/celery/celery/pull/8373 -* fix: copyright year by @karanganesan in https://github.com/celery/celery/pull/8380 -* setup.py: enable include_package_data by @elohmeier in https://github.com/celery/celery/pull/8379 -* Fix eager tasks does not populate name field by @KOliver94 in https://github.com/celery/celery/pull/8383 -* Update test.txt dependencies by @auvipy in https://github.com/celery/celery/pull/8389 -* Update auth.txt deps by @auvipy in https://github.com/celery/celery/pull/8392 -* Fix backend.get_task_meta ignores the result_extended config parameter in mongodb backend by @ycc140 in https://github.com/celery/celery/pull/8391 -* Support preload options for shell and purge commands by @dpdoughe in https://github.com/celery/celery/pull/8374 -* Implement safer ArangoDB queries by @othieno in https://github.com/celery/celery/pull/8351 -* integration test: cleanup worker after test case by @zhu in https://github.com/celery/celery/pull/8361 -* Added "Tomer Nosrati" to CONTRIBUTORS.txt by @Nusnus in https://github.com/celery/celery/pull/8400 -* Update README.rst by @sourabhligade in https://github.com/celery/celery/pull/8404 -* Update README.rst by @sourabhligade in https://github.com/celery/celery/pull/8408 -* fix(canvas): add group index when unrolling tasks by @mkniewallner in https://github.com/celery/celery/pull/8427 -* fix(beat): debug statement should only log AsyncResult.id if it exists by @bmrobin in https://github.com/celery/celery/pull/8428 -* Lint fixes & pre-commit autoupdate by @Nusnus in https://github.com/celery/celery/pull/8414 -* Update auth.txt by @auvipy in https://github.com/celery/celery/pull/8435 -* Update mypy on test.txt by 
@auvipy in https://github.com/celery/celery/pull/8438 -* added missing kwargs arguments in some cli cmd by @auvipy in https://github.com/celery/celery/pull/8049 -* Fix #8431: Set format_date to False when calling _get_result_meta on mongo backend by @asukero in https://github.com/celery/celery/pull/8432 -* Docs: rewrite out-of-date code by @paradox-lab in https://github.com/celery/celery/pull/8441 -* Limit redis client to 4.x since 5.x fails the test suite by @thedrow in https://github.com/celery/celery/pull/8442 -* Limit tox to < 4.9 by @Nusnus in https://github.com/celery/celery/pull/8443 -* Fixed issue: Flags broker_connection_retry_on_startup & broker_connection_retry aren’t reliable #8433 by @Nusnus in https://github.com/celery/celery/pull/8446 -* doc update from #7651 by @rainnnnny in https://github.com/celery/celery/pull/8451 -* Remove tox version limit by @Nusnus in https://github.com/celery/celery/pull/8464 -* Fixed AttributeError: 'str' object has no attribute by @Nusnus in https://github.com/celery/celery/pull/8463 -* Upgraded Kombu from 5.3.1 -> 5.3.2 by @Nusnus in https://github.com/celery/celery/pull/8468 - -New Contributors ----------------- -* @aaronst made their first contribution in https://github.com/celery/celery/pull/8338 -* @zhu made their first contribution in https://github.com/celery/celery/pull/8301 -* @dwysocki made their first contribution in https://github.com/celery/celery/pull/8366 -* @ooyamatakehisa made their first contribution in https://github.com/celery/celery/pull/8373 -* @karanganesan made their first contribution in https://github.com/celery/celery/pull/8380 -* @elohmeier made their first contribution in https://github.com/celery/celery/pull/8379 -* @KOliver94 made their first contribution in https://github.com/celery/celery/pull/8383 -* @ycc140 made their first contribution in https://github.com/celery/celery/pull/8391 -* @dpdoughe made their first contribution in https://github.com/celery/celery/pull/8374 -* @othieno made their first contribution in https://github.com/celery/celery/pull/8351 -* @sourabhligade made their first contribution in https://github.com/celery/celery/pull/8404 -* @mkniewallner made their first contribution in https://github.com/celery/celery/pull/8427 -* @bmrobin made their first contribution in https://github.com/celery/celery/pull/8428 -* @asukero made their first contribution in https://github.com/celery/celery/pull/8432 -* @rainnnnny made their first contribution in https://github.com/celery/celery/pull/8451 - -**Full Changelog**: https://github.com/celery/celery/compare/v5.3.1...v5.3.2 +.. warning:: + This version has been yanked due to breaking API changes. 
The breaking changes include: + + - Store children with database backend (#8338) + - Fix eager tasks does not populate name field (#8383) + +- Bugfix: Removed unecessary stamping code from _chord.run() (#8339) +- User guide fix (hotfix for #1755) (#8342) +- Store children with database backend (#8338) +- Stamping bugfix with group/chord header errback linking (#8347) +- Use argsrepr and kwargsrepr in LOG_RECEIVED (#8301) +- Fixing minor typo in code example in calling.rst (#8366) +- Add documents for timeout settings (#8373) +- Fix: copyright year (#8380) +- Setup.py: enable include_package_data (#8379) +- Fix eager tasks does not populate name field (#8383) +- Update test.txt dependencies (#8389) +- Update auth.txt deps (#8392) +- Fix backend.get_task_meta ignores the result_extended config parameter in mongodb backend (#8391) +- Support preload options for shell and purge commands (#8374) +- Implement safer ArangoDB queries (#8351) +- Integration test: cleanup worker after test case (#8361) +- Added "Tomer Nosrati" to CONTRIBUTORS.txt (#8400) +- Update README.rst (#8404) +- Update README.rst (#8408) +- Fix(canvas): add group index when unrolling tasks (#8427) +- Fix(beat): debug statement should only log AsyncResult.id if it exists (#8428) +- Lint fixes & pre-commit autoupdate (#8414) +- Update auth.txt (#8435) +- Update mypy on test.txt (#8438) +- Added missing kwargs arguments in some cli cmd (#8049) +- Fix #8431: Set format_date to False when calling _get_result_meta on mongo backend (#8432) +- Docs: rewrite out-of-date code (#8441) +- Limit redis client to 4.x since 5.x fails the test suite (#8442) +- Limit tox to < 4.9 (#8443) +- Fixed issue: Flags broker_connection_retry_on_startup & broker_connection_retry aren’t reliable (#8446) +- Doc update from #7651 (#8451) +- Remove tox version limit (#8464) +- Fixed AttributeError: 'str' object has no attribute (#8463) +- Upgraded Kombu from 5.3.1 -> 5.3.2 (#8468) .. _version-5.3.1: From 6b3409c60f1ee1902757f982ba8c7f1c9cd8ad9d Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sun, 3 Sep 2023 22:42:05 +0300 Subject: [PATCH 0484/1051] =?UTF-8?q?Bump=20version:=205.3.3=20=E2=86=92?= =?UTF-8?q?=205.3.4?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.cfg | 2 +- README.rst | 6 +++--- celery/__init__.py | 2 +- docs/includes/introduction.txt | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 4f74ddd02fd..18353538fa5 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 5.3.3 +current_version = 5.3.4 commit = True tag = True parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P[a-z\d]+)? diff --git a/README.rst b/README.rst index 7b3211d3340..cabfbba1d96 100644 --- a/README.rst +++ b/README.rst @@ -2,7 +2,7 @@ |build-status| |coverage| |license| |wheel| |semgrep| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge| -:Version: 5.3.3 (emerald-rush) +:Version: 5.3.4 (emerald-rush) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ @@ -58,7 +58,7 @@ in such a way that the client enqueues an URL to be requested by a worker. What do I need? 
=============== -Celery version 5.3.3 runs on: +Celery version 5.3.4 runs on: - Python (3.8, 3.9, 3.10, 3.11) - PyPy3.8+ (v7.3.11+) @@ -92,7 +92,7 @@ Get Started =========== If this is the first time you're trying to use Celery, or you're -new to Celery v5.3.3 coming from previous versions then you should read our +new to Celery v5.3.4 coming from previous versions then you should read our getting started tutorials: - `First steps with Celery`_ diff --git a/celery/__init__.py b/celery/__init__.py index 6c215c3561d..e11a18c7b7e 100644 --- a/celery/__init__.py +++ b/celery/__init__.py @@ -17,7 +17,7 @@ SERIES = 'emerald-rush' -__version__ = '5.3.3' +__version__ = '5.3.4' __author__ = 'Ask Solem' __contact__ = 'auvipy@gmail.com' __homepage__ = 'https://docs.celeryq.dev/' diff --git a/docs/includes/introduction.txt b/docs/includes/introduction.txt index 31db9416847..6ce97bb020e 100644 --- a/docs/includes/introduction.txt +++ b/docs/includes/introduction.txt @@ -1,4 +1,4 @@ -:Version: 5.3.3 (emerald-rush) +:Version: 5.3.4 (emerald-rush) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ From 504f4aac4e1eb4376948939735ceb6f08b95bdc3 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Mon, 4 Sep 2023 15:44:39 +0600 Subject: [PATCH 0485/1051] Update test.txt versions (#8481) --- requirements/test.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/test.txt b/requirements/test.txt index f31cf7888f5..1d02f983aa9 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,4 +1,4 @@ -pytest==7.4.0 +pytest==7.4.1 pytest-celery==0.0.0 pytest-subtests==0.11.0 pytest-timeout==2.1.0 @@ -7,7 +7,7 @@ pytest-order==1.1.0 boto3>=1.26.143 moto>=4.1.11 # typing extensions -mypy==1.5.0; platform_python_implementation=="CPython" +mypy==1.5.1; platform_python_implementation=="CPython" pre-commit==3.3.3 -r extras/yaml.txt -r extras/msgpack.txt From b6a5bdb8b698dbe2a0848e34f76133f2950c5a82 Mon Sep 17 00:00:00 2001 From: Yingcheng Wang Date: Mon, 4 Sep 2023 17:47:23 +0800 Subject: [PATCH 0486/1051] fix os.getcwd() FileNotFoundError (#8448) * fix os.getcwd() FileNotFoundError * fix os.getcwd() FileNotFoundError unit test * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * fix os.getcwd() FileNotFoundError unit test * fix unit test * fix unit test * fix Windows unit test * fix Windows unit test * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --------- Co-authored-by: hunter Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- celery/utils/imports.py | 9 +++++++-- t/unit/utils/test_imports.py | 25 ++++++++++++++++++++++++- 2 files changed, 31 insertions(+), 3 deletions(-) diff --git a/celery/utils/imports.py b/celery/utils/imports.py index 390b22ce894..676a4516b8f 100644 --- a/celery/utils/imports.py +++ b/celery/utils/imports.py @@ -51,8 +51,13 @@ def instantiate(name, *args, **kwargs): @contextmanager def cwd_in_path(): """Context adding the current working directory to sys.path.""" - cwd = os.getcwd() - if cwd in sys.path: + try: + cwd = os.getcwd() + except FileNotFoundError: + cwd = None + if not cwd: + yield + elif cwd in sys.path: yield else: sys.path.insert(0, cwd) diff --git a/t/unit/utils/test_imports.py b/t/unit/utils/test_imports.py index d3bcedf2234..38632847d6f 100644 --- a/t/unit/utils/test_imports.py +++ 
b/t/unit/utils/test_imports.py @@ -1,9 +1,12 @@ +import os +import platform import sys from unittest.mock import Mock, patch import pytest -from celery.utils.imports import NotAPackage, find_module, gen_task_name, module_file, qualname, reload_from_cwd +from celery.utils.imports import (NotAPackage, cwd_in_path, find_module, gen_task_name, module_file, qualname, + reload_from_cwd) def test_find_module(): @@ -92,6 +95,26 @@ def test_module_file(): assert module_file(m1) == '/opt/foo/xyz.py' +def test_cwd_in_path(tmp_path, monkeypatch): + now_cwd = os.getcwd() + t = str(tmp_path) + "/foo" + os.mkdir(t) + os.chdir(t) + with cwd_in_path(): + assert os.path.exists(t) is True + + if sys.platform == "win32" or "Windows" in platform.platform(): + # If it is a Windows server, other processes cannot delete the current working directory being used by celery + # . If you want to delete it, you need to terminate the celery process. If it is a Linux server, the current + # working directory of celery can be deleted by other processes. + pass + else: + os.rmdir(t) + with cwd_in_path(): + assert os.path.exists(t) is False + os.chdir(now_cwd) + + class test_gen_task_name: def test_no_module(self): From c53c89407c935f49139ea8487be7f40b3740e2c3 Mon Sep 17 00:00:00 2001 From: Renato Monteiro <45536168+monteiro-renato@users.noreply.github.com> Date: Sun, 10 Sep 2023 16:57:33 +0100 Subject: [PATCH 0487/1051] Fix typo in CONTRIBUTING.rst (#8494) --- CONTRIBUTING.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 7ce6913f850..8fdb3df4dc4 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -825,7 +825,7 @@ had to be modified. .. _`Isort`: https://isort.readthedocs.io/en/latest/ -.. _contributing-pull-requets: +.. _contributing-pull-requests: Creating pull requests ---------------------- From 15c1c5d393718aa4c5c0c25445e675d2e117afff Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=B3=8A=E6=B6=82?= <104620424+shifenhutu@users.noreply.github.com> Date: Mon, 11 Sep 2023 01:06:25 +0800 Subject: [PATCH 0488/1051] typo: configuration.rst (#8484) https://github.com/celery/celery/issues/8483 --- docs/userguide/configuration.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst index c3f60abe0ac..fbfc3af9aa7 100644 --- a/docs/userguide/configuration.rst +++ b/docs/userguide/configuration.rst @@ -161,7 +161,7 @@ have been moved into a new ``task_`` prefix. ``CELERY_ENABLE_REMOTE_CONTROL`` :setting:`worker_enable_remote_control` ``CELERYD_HIJACK_ROOT_LOGGER`` :setting:`worker_hijack_root_logger` ``CELERYD_LOG_COLOR`` :setting:`worker_log_color` -``CELERYD_LOG_FORMAT`` :setting:`worker_log_format` +``CELERY_WORKER_LOG_FORMAT`` :setting:`worker_log_format` ``CELERYD_WORKER_LOST_WAIT`` :setting:`worker_lost_wait` ``CELERYD_MAX_TASKS_PER_CHILD`` :setting:`worker_max_tasks_per_child` ``CELERYD_POOL`` :setting:`worker_pool` @@ -172,7 +172,7 @@ have been moved into a new ``task_`` prefix. 
``CELERYD_REDIRECT_STDOUTS_LEVEL``         :setting:`worker_redirect_stdouts_level`
 ``CELERY_SEND_EVENTS``                     :setting:`worker_send_task_events`
 ``CELERYD_STATE_DB``                       :setting:`worker_state_db`
-``CELERYD_TASK_LOG_FORMAT``                :setting:`worker_task_log_format`
+``CELERY_WORKER_TASK_LOG_FORMAT``          :setting:`worker_task_log_format`
 ``CELERYD_TIMER``                          :setting:`worker_timer`
 ``CELERYD_TIMER_PRECISION``                :setting:`worker_timer_precision`
 ========================================== ==============================================

From 5f99e694269db357b6ee3d1216289d8c47e5a034 Mon Sep 17 00:00:00 2001
From: Renato Monteiro <45536168+monteiro-renato@users.noreply.github.com>
Date: Sun, 10 Sep 2023 18:11:26 +0100
Subject: [PATCH 0489/1051] assert before raise (#8495)

* assert before raise

As far as I can see, the raise call was introduced in this commit
> https://github.com/celery/celery/commit/d60fa8d40c1fabc637b9497d20079f9bcb04fc24#diff-862dbad852b93aa98f9c885f9c0e9e2b0145ea4cb3d7efcfec66231a5803ab0dR84.
I believe, however, it was meant to be called after the assert statement.

* remove extra blank line

* remove extra spaces

---
 t/benchmarks/bench_worker.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/t/benchmarks/bench_worker.py b/t/benchmarks/bench_worker.py
index 55503716d51..89626c5b4e5 100644
--- a/t/benchmarks/bench_worker.py
+++ b/t/benchmarks/bench_worker.py
@@ -83,8 +83,8 @@ def bench_work(n=DEFAULT_ITS, loglevel='CRITICAL'):
         print('-- starting worker')
         worker.start()
     except SystemExit:
-        raise
         assert sum(worker.state.total_count.values()) == n + 1
+        raise


 def bench_both(n=DEFAULT_ITS):

From 133233fad70908cb1aca58c6b801eeb4caf8c92e Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Mon, 11 Sep 2023 14:10:30 +0600
Subject: [PATCH 0490/1051] Update GHA checkout version (#8496)

---
 .github/workflows/python-package.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml
index 40af8568391..2049fe37211 100644
--- a/.github/workflows/python-package.yml
+++ b/.github/workflows/python-package.yml
@@ -42,7 +42,7 @@ jobs:
         if: startsWith(matrix.os, 'ubuntu-')
         run: |
           sudo apt-get update && sudo apt-get install -f libcurl4-openssl-dev libssl-dev libgnutls28-dev httping expect libmemcached-dev
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
       - name: Set up Python ${{ matrix.python-version }}
         uses: actions/setup-python@v4
         with:
@@ -99,7 +99,7 @@ jobs:
        run: |
          sudo apt-get update && sudo apt-get install -f libcurl4-openssl-dev libssl-dev libgnutls28-dev httping expect libmemcached-dev

-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
       - name: Set up Python ${{ matrix.python-version }}
         uses: actions/setup-python@v4
         with:

From add91ec512deaa769501a7817fc326f238bfd062 Mon Sep 17 00:00:00 2001
From: Tomer Nosrati
Date: Wed, 13 Sep 2023 17:13:31 +0300
Subject: [PATCH 0491/1051] Fixed replaced_task_nesting (#8500)

* Fixed bug where replaced_task_nesting field did not appear in the headers

* Added tests under new suite: t/integration/test_tasks.py::test_task_replacement

---
 celery/app/amqp.py          |  3 ++-
 celery/app/base.py          |  4 +--
 t/integration/test_tasks.py | 49 +++++++++++++++++++++++++++++++++++--
 3 files changed, 51 insertions(+), 5 deletions(-)

diff --git a/celery/app/amqp.py b/celery/app/amqp.py
index 9e52af4a66f..e6aae3f8b3c 100644
--- a/celery/app/amqp.py
+++ b/celery/app/amqp.py
@@ -285,7 +285,7 @@ def as_task_v2(self, task_id, name, args=None, kwargs=None,
                    create_sent_event=False, root_id=None,
parent_id=None, shadow=None, chain=None, now=None, timezone=None, origin=None, ignore_result=False, argsrepr=None, kwargsrepr=None, stamped_headers=None, - **options): + replaced_task_nesting=0, **options): args = args or () kwargs = kwargs or {} @@ -339,6 +339,7 @@ def as_task_v2(self, task_id, name, args=None, kwargs=None, 'kwargsrepr': kwargsrepr, 'origin': origin or anon_nodename(), 'ignore_result': ignore_result, + 'replaced_task_nesting': replaced_task_nesting, 'stamped_headers': stamped_headers, 'stamps': stamps, } diff --git a/celery/app/base.py b/celery/app/base.py index cfd71c627fb..fb78893ba2d 100644 --- a/celery/app/base.py +++ b/celery/app/base.py @@ -711,7 +711,7 @@ def send_task(self, name, args=None, kwargs=None, countdown=None, retries=0, chord=None, reply_to=None, time_limit=None, soft_time_limit=None, root_id=None, parent_id=None, route_name=None, - shadow=None, chain=None, task_type=None, **options): + shadow=None, chain=None, task_type=None, replaced_task_nesting=0, **options): """Send task by name. Supports the same arguments as :meth:`@-Task.apply_async`. @@ -781,7 +781,7 @@ def send_task(self, name, args=None, kwargs=None, countdown=None, self.conf.task_send_sent_event, root_id, parent_id, shadow, chain, ignore_result=ignore_result, - **options + replaced_task_nesting=replaced_task_nesting, **options ) stamped_headers = options.pop('stamped_headers', []) diff --git a/t/integration/test_tasks.py b/t/integration/test_tasks.py index f11314c6f9e..2582357777e 100644 --- a/t/integration/test_tasks.py +++ b/t/integration/test_tasks.py @@ -9,13 +9,14 @@ import celery from celery import chain, chord, group from celery.canvas import StampingVisitor +from celery.signals import task_received from celery.utils.serialization import UnpickleableExceptionWrapper from celery.worker import state as worker_state -from .conftest import TEST_BACKEND, get_active_redis_channels +from .conftest import TEST_BACKEND, get_active_redis_channels, get_redis_connection from .tasks import (ClassBasedAutoRetryTask, ExpectedException, add, add_ignore_result, add_not_typed, fail, fail_unpickleable, print_unicode, retry, retry_once, retry_once_headers, retry_once_priority, - retry_unpickleable, return_properties, sleeping) + retry_unpickleable, return_properties, second_order_replace1, sleeping) TIMEOUT = 10 @@ -533,3 +534,47 @@ def test_asyncresult_get_cancels_subscription(self, manager): new_channels = [channel for channel in get_active_redis_channels() if channel not in channels_before_test] assert new_channels == [] + + +class test_task_replacement: + def test_replaced_task_nesting_level_0(self, manager): + @task_received.connect + def task_received_handler(request, **kwargs): + nonlocal assertion_result + + try: + # This tests mainly that the field even exists and set to default 0 + assertion_result = request.replaced_task_nesting < 1 + except Exception: + assertion_result = False + + non_replaced_task = add.si(4, 2) + res = non_replaced_task.delay() + assertion_result = False + assert res.get(timeout=TIMEOUT) == 6 + assert assertion_result + + def test_replaced_task_nesting_level_1(self, manager): + if not manager.app.conf.result_backend.startswith("redis"): + raise pytest.skip("Requires redis result backend.") + + redis_connection = get_redis_connection() + redis_connection.delete("redis-echo") + + @task_received.connect + def task_received_handler(request, **kwargs): + nonlocal assertion_result + + try: + assertion_result = request.replaced_task_nesting < 2 + except Exception: + 
assertion_result = False + + replaced_task = second_order_replace1.si() + res = replaced_task.delay() + assertion_result = False + res.get(timeout=TIMEOUT) + assert assertion_result + redis_messages = list(redis_connection.lrange("redis-echo", 0, -1)) + expected_messages = [b"In A", b"In B", b"In/Out C", b"Out B", b"Out A"] + assert redis_messages == expected_messages From ea29618bec32354051189bc7285439aafbcfe5c7 Mon Sep 17 00:00:00 2001 From: Stefanie Molin <24376333+stefmolin@users.noreply.github.com> Date: Wed, 13 Sep 2023 12:12:37 -0400 Subject: [PATCH 0492/1051] Fix code indentation for route_task() example --- docs/userguide/configuration.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst index fbfc3af9aa7..3a8fcdd6a5a 100644 --- a/docs/userguide/configuration.rst +++ b/docs/userguide/configuration.rst @@ -2349,8 +2349,8 @@ Where ``myapp.tasks.route_task`` could be: .. code-block:: python def route_task(self, name, args, kwargs, options, task=None, **kw): - if task == 'celery.ping': - return {'queue': 'default'} + if task == 'celery.ping': + return {'queue': 'default'} ``route_task`` may return a string or a dict. A string then means it's a queue name in :setting:`task_queues`, a dict means it's a custom route. From 3b20010020cf810a0ba40a27d6d4e83210d4a5e2 Mon Sep 17 00:00:00 2001 From: Dulmandakh Date: Thu, 14 Sep 2023 11:16:34 +0800 Subject: [PATCH 0493/1051] support redis 5.x --- requirements/extras/redis.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/redis.txt b/requirements/extras/redis.txt index ef3addb0bd9..35731b915b4 100644 --- a/requirements/extras/redis.txt +++ b/requirements/extras/redis.txt @@ -1 +1 @@ -redis>=4.5.2,<5.0.0,!=4.5.5 +redis>=4.5.2,<6.0.0,!=4.5.5 From a4fa400253e0a1376bce5239697de4d51b622803 Mon Sep 17 00:00:00 2001 From: Renato Monteiro <45536168+monteiro-renato@users.noreply.github.com> Date: Fri, 15 Sep 2023 12:33:16 +0100 Subject: [PATCH 0494/1051] Fix typos in test_canvas.py (#8498) * Fix typos in test_canvas.py * Fix another typo in test_canvas.py * Fix another typo in test_canvas.py * Update t/integration/test_canvas.py Co-authored-by: Renato Monteiro <45536168+monteiro-renato@users.noreply.github.com> * Make clearer what a zset is --------- Co-authored-by: Asif Saif Uddin --- t/integration/test_canvas.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/t/integration/test_canvas.py b/t/integration/test_canvas.py index d758e97bd4a..4ede84cf9ea 100644 --- a/t/integration/test_canvas.py +++ b/t/integration/test_canvas.py @@ -165,9 +165,9 @@ def test_link_error_callback_retries(self, manager): @flaky def test_link_error_using_signature_eager(self): fail = signature('t.integration.tasks.fail', args=("test",)) - retrun_exception = signature('t.integration.tasks.return_exception') + return_exception = signature('t.integration.tasks.return_exception') - fail.link_error(retrun_exception) + fail.link_error(return_exception) exception = ExpectedException("Task expected to fail", "test") assert (fail.apply().get(timeout=TIMEOUT, propagate=False), True) == ( @@ -175,9 +175,9 @@ def test_link_error_using_signature_eager(self): def test_link_error_using_signature(self, manager): fail = signature('t.integration.tasks.fail', args=("test",)) - retrun_exception = signature('t.integration.tasks.return_exception') + return_exception = signature('t.integration.tasks.return_exception') - 
fail.link_error(retrun_exception) + fail.link_error(return_exception) exception = ExpectedException("Task expected to fail", "test") assert (fail.delay().get(timeout=TIMEOUT / 10, propagate=False), True) == ( @@ -1877,7 +1877,7 @@ def test_chord_on_error(self, manager): backend = fail.app.backend j_key = backend.get_key_for_group(original_group_id, '.j') redis_connection = get_redis_connection() - # The redis key is either a list or zset depending on configuration + # The redis key is either a list or a zset (a redis sorted set) depending on configuration if manager.app.conf.result_backend_transport_options.get( 'result_chord_ordered', True ): @@ -3132,12 +3132,12 @@ def task_received_handler(request=None, **kwargs): [ stamped_header in link.options for stamped_header in link.options["stamped_headers"] - if link # the link itself doensn't have a link + if link # the link itself doesn't have a link ], [ stamped_header in link_error.options for stamped_header in link_error.options["stamped_headers"] - if link_error # the link_error itself doensn't have a link + if link_error # the link_error itself doesn't have a link_error ], ] ) From 6705945b46b065c2746b4783da8d16034347b5e1 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sat, 16 Sep 2023 18:41:38 +0300 Subject: [PATCH 0495/1051] Marked flaky: test_mutable_errback_called_by_chord_from_group() and test_asyncresult_forget_cancels_subscription() (#8508) --- t/integration/test_canvas.py | 1 + t/integration/test_tasks.py | 1 + 2 files changed, 2 insertions(+) diff --git a/t/integration/test_canvas.py b/t/integration/test_canvas.py index 4ede84cf9ea..6cec87c68cf 100644 --- a/t/integration/test_canvas.py +++ b/t/integration/test_canvas.py @@ -2499,6 +2499,7 @@ def test_immutable_errback_called_by_chord_from_group( await_redis_echo({errback_msg, }, redis_key=redis_key) redis_connection.delete(redis_key) + @flaky @pytest.mark.parametrize( "errback_task", [errback_old_style, errback_new_style, ], ) diff --git a/t/integration/test_tasks.py b/t/integration/test_tasks.py index 2582357777e..fa2fdedb816 100644 --- a/t/integration/test_tasks.py +++ b/t/integration/test_tasks.py @@ -511,6 +511,7 @@ def test_ignoring_result_no_subscriptions(self, manager): new_channels = [channel for channel in get_active_redis_channels() if channel not in channels_before_test] assert new_channels == [] + @flaky def test_asyncresult_forget_cancels_subscription(self, manager): channels_before_test = get_active_redis_channels() From 13e367f071c45383bbd36b2630aecd43b8447f18 Mon Sep 17 00:00:00 2001 From: "Kuan-Wei, Chiu" Date: Sun, 17 Sep 2023 00:02:52 +0800 Subject: [PATCH 0496/1051] Fix typos in calling.rst (#8506) Corrected a missing 'to' in the sentence and added a missing ')'. --- docs/userguide/calling.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/userguide/calling.rst b/docs/userguide/calling.rst index e3c0f84c18c..40e0aeced08 100644 --- a/docs/userguide/calling.rst +++ b/docs/userguide/calling.rst @@ -315,7 +315,7 @@ either as seconds after task publish, or a specific date and time using >>> # Also supports datetime >>> from datetime import datetime, timedelta >>> add.apply_async((10, 10), kwargs, - ... expires=datetime.now() + timedelta(days=1) + ... expires=datetime.now() + timedelta(days=1)) When a worker receives an expired task it will mark @@ -555,7 +555,7 @@ msgpack -- msgpack is a binary serialization format that's closer to JSON See http://msgpack.org/ for more information. 
-To use a custom serializer you need add the content type to +To use a custom serializer you need to add the content type to :setting:`accept_content`. By default, only JSON is accepted, and tasks containing other content headers are rejected. From 7643e743cf21463e91d6c2a3d36699e597e6a8b1 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sat, 16 Sep 2023 19:52:10 +0300 Subject: [PATCH 0497/1051] Added support for replaced_task_nesting in chains (#8501) * Added support for replaced_task_nesting in chains * Added integration tests in t/integration/test_tasks.py::test_replaced_task_nesting_chain() * Added: test_replace_chain() --- celery/app/task.py | 11 ++++++++++- t/integration/test_tasks.py | 27 ++++++++++++++++++++++++++- t/unit/tasks/test_tasks.py | 11 ++++++++++- 3 files changed, 46 insertions(+), 3 deletions(-) diff --git a/celery/app/task.py b/celery/app/task.py index 7998d600b76..cceb2a09ccd 100644 --- a/celery/app/task.py +++ b/celery/app/task.py @@ -954,11 +954,20 @@ def replace(self, sig): root_id=self.request.root_id, replaced_task_nesting=replaced_task_nesting ) + + # If the replaced task is a chain, we want to set all of the chain tasks + # with the same replaced_task_nesting value to mark their replacement nesting level + if isinstance(sig, _chain): + for chain_task in maybe_list(sig.tasks) or []: + chain_task.set(replaced_task_nesting=replaced_task_nesting) + # If the task being replaced is part of a chain, we need to re-create # it with the replacement signature - these subsequent tasks will # retain their original task IDs as well for t in reversed(self.request.chain or []): - sig |= signature(t, app=self.app) + chain_task = signature(t, app=self.app) + chain_task.set(replaced_task_nesting=replaced_task_nesting) + sig |= chain_task return self.on_replace(sig) def add_to_chord(self, sig, lazy=False): diff --git a/t/integration/test_tasks.py b/t/integration/test_tasks.py index fa2fdedb816..5dc5c955358 100644 --- a/t/integration/test_tasks.py +++ b/t/integration/test_tasks.py @@ -567,7 +567,7 @@ def task_received_handler(request, **kwargs): nonlocal assertion_result try: - assertion_result = request.replaced_task_nesting < 2 + assertion_result = request.replaced_task_nesting <= 2 except Exception: assertion_result = False @@ -579,3 +579,28 @@ def task_received_handler(request, **kwargs): redis_messages = list(redis_connection.lrange("redis-echo", 0, -1)) expected_messages = [b"In A", b"In B", b"In/Out C", b"Out B", b"Out A"] assert redis_messages == expected_messages + + def test_replaced_task_nesting_chain(self, manager): + if not manager.app.conf.result_backend.startswith("redis"): + raise pytest.skip("Requires redis result backend.") + + redis_connection = get_redis_connection() + redis_connection.delete("redis-echo") + + @task_received.connect + def task_received_handler(request, **kwargs): + nonlocal assertion_result + + try: + assertion_result = request.replaced_task_nesting <= 3 + except Exception: + assertion_result = False + + assertion_result = False + chain_task = second_order_replace1.si() | add.si(4, 2) + res = chain_task.delay() + res.get(timeout=TIMEOUT) + assert assertion_result + redis_messages = list(redis_connection.lrange("redis-echo", 0, -1)) + expected_messages = [b"In A", b"In B", b"In/Out C", b"Out B", b"Out A"] + assert redis_messages == expected_messages diff --git a/t/unit/tasks/test_tasks.py b/t/unit/tasks/test_tasks.py index 36bb792b16d..5cff1c3db07 100644 --- a/t/unit/tasks/test_tasks.py +++ b/t/unit/tasks/test_tasks.py @@ -7,7 +7,7 @@ from kombu 
import Queue from kombu.exceptions import EncodeError -from celery import Task, group, uuid +from celery import Task, chain, group, uuid from celery.app.task import _reprtask from celery.canvas import StampingVisitor, signature from celery.contrib.testing.mocks import ContextMock @@ -1198,6 +1198,15 @@ def test_replace_group(self): with pytest.raises(Ignore): self.mytask.replace(c) + def test_replace_chain(self): + c = chain([self.mytask.si(), self.mytask.si()], app=self.app) + c.freeze = Mock(name='freeze') + c.delay = Mock(name='delay') + self.mytask.request.id = 'id' + self.mytask.request.chain = c + with pytest.raises(Ignore): + self.mytask.replace(c) + def test_replace_run(self): with pytest.raises(Ignore): self.task_replaced_by_other_task.run() From 9a9ab47d4b7bf78128b4d8e05166486d8921ee39 Mon Sep 17 00:00:00 2001 From: Kuan-Wei Chiu Date: Sun, 17 Sep 2023 09:15:18 +0800 Subject: [PATCH 0498/1051] Fix typos in canvas.rst Improved overall readability by fixing various typos and grammar issues. - Corrected "Here's" to "Here're" for plural consistency. - Fixed a grammatical error by changing "task" to "tasks." - Corrected "received" to "receive" for proper verb tense. - Added an apostrophe to "chunks" to indicate possession. - Replaced "lets" with "let's" for proper contraction. --- docs/userguide/canvas.rst | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/docs/userguide/canvas.rst b/docs/userguide/canvas.rst index 8264f531fa4..b87dabca17c 100644 --- a/docs/userguide/canvas.rst +++ b/docs/userguide/canvas.rst @@ -308,7 +308,7 @@ The Primitives The primitives are also signature objects themselves, so that they can be combined in any number of ways to compose complex work-flows. -Here's some examples: +Here're some examples: - Simple chain @@ -389,7 +389,7 @@ Here's some examples: >>> res.get() 90 - The above example creates 10 task that all start in parallel, + The above example creates 10 tasks that all start in parallel, and when all of them are complete the return values are combined into a list and sent to the ``tsum`` task. @@ -706,7 +706,7 @@ a linked callback signature. Additionally, linking the task will *not* guarantee that it will activate only when all group tasks have finished. As an example, the following snippet using a simple `add(a, b)` task is faulty -since the linked `add.s()` signature will not received the finalised group +since the linked `add.s()` signature will not receive the finalised group result as one might expect. .. code-block:: pycon @@ -1085,7 +1085,7 @@ of parallelism, but this is rarely true for a busy cluster and in practice since you're avoiding the overhead of messaging it may considerably increase performance. -To create a chunks signature you can use :meth:`@Task.chunks`: +To create a chunks' signature you can use :meth:`@Task.chunks`: .. code-block:: pycon @@ -1232,7 +1232,7 @@ the external monitoring system, etc. def on_signature(self, sig, **headers) -> dict: return {'monitoring_id': uuid4().hex, 'stamped_headers': ['monitoring_id']} -Next, lets see how to use the ``MonitoringIdStampingVisitor`` example stamping visitor. +Next, let's see how to use the ``MonitoringIdStampingVisitor`` example stamping visitor. .. code-block:: python @@ -1261,7 +1261,7 @@ visitor will be applied to the callback as well. The callback must be linked to the signature before stamping. -For example, lets examine the following custom stamping visitor. +For example, let's examine the following custom stamping visitor. .. 
code-block:: python From ba994d86979080e43c5e752591e6faedaafc3b2a Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sun, 17 Sep 2023 15:38:47 +0300 Subject: [PATCH 0499/1051] Patch Version Release Checklist (#8488) * Added new issue template for maintainers only: Patch-Version-Release-Checklist.md The issue will guide maintainers on how to execute patch releases. It will contain both instructions and a live checklist for the community to follow on progress and updates. * Apply suggestions from code review by @thedrow Co-authored-by: Omer Katz * Added comment about yanking a faulty release --------- Co-authored-by: Omer Katz --- .../Patch-Version-Release-Checklist.md | 136 ++++++++++++++++++ 1 file changed, 136 insertions(+) create mode 100644 .github/ISSUE_TEMPLATE/Patch-Version-Release-Checklist.md diff --git a/.github/ISSUE_TEMPLATE/Patch-Version-Release-Checklist.md b/.github/ISSUE_TEMPLATE/Patch-Version-Release-Checklist.md new file mode 100644 index 00000000000..0140d93e1c3 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/Patch-Version-Release-Checklist.md @@ -0,0 +1,136 @@ +--- +name: Patch Version Release Checklist +about: About to release a new patch version? (Maintainers Only!) +title: '' +labels: '' +assignees: '' + +--- + +# Patch Release Overview: v + +This issue will summarize the status and discussion in preparation for the new release. It will be used to track the progress of the release and to ensure that all the necessary steps are taken. It will serve as a checklist for the release and will be used to communicate the status of the release to the community. + +> ⚠️ **Warning:** The release checklist is a living document. It will be updated as the release progresses. Please check back often to ensure that you are up to date with the latest information. + +## Checklist +- [ ] Codebase Stability +- [ ] Breaking Changes Validation +- [ ] Compile Changelog +- [ ] Release +- [ ] Release Announcement + +# Release Details +The release manager is responsible for completing the release end-to-end ensuring that all the necessary steps are taken and that the release is completed in a timely manner. This is usually the owner of the release issue but may be assigned to a different maintainer if necessary. + +- Release Manager: +- Release Date: +- Release Branch: `main` + +# Release Steps +The release manager is expected to execute the checklist below. The release manager is also responsible for ensuring that the checklist is updated as the release progresses. Any changes or issues should be communicated under this issue for centralized tracking. + +## 1. Codebase Stability +- [ ] The `main` branch build passes + + [![Build Status](https://github.com/celery/celery/actions/workflows/python-package.yml/badge.svg)](https://github.com/celery/celery/actions/workflows/python-package.yml) + +## 2. Breaking Changes Validation +A patch release should not contain any breaking changes. The release manager is responsible for reviewing all of the merged PRs since the last release to ensure that there are no breaking changes. If there are any breaking changes, the release manager should discuss with the maintainers to determine the best course of action if an obvious solution is not apparent. + +## 3. Compile Changelog +The release changelog is set in two different places: +1. The [Changelog.rst](https://github.com/celery/celery/blob/main/Changelog.rst) that uses the RST format. +2. The GitHub Release auto-generated changelog that uses the Markdown format. 
This is auto-generated by the GitHub Draft Release UI.
+
+> ⚠️ **Warning:** The pre-commit changes should not be included in the changelog.
+
+To generate the changelog automatically, [draft a new release](https://github.com/celery/celery/releases/new) on GitHub using a fake new version tag for the automatic changelog generation. Notice the actual tag creation is done **on publish** so we can use that to generate the changelog and then delete the draft release without publishing it thus avoiding creating a new tag.
+
+- Create a new tag
+CleanShot 2023-09-05 at 22 06 24@2x
+
+- Generate Markdown release notes
+CleanShot 2023-09-05 at 22 13 39@2x
+
+- Copy the generated release notes.
+
+- Delete the draft release without publishing it.
+
+### 3.1 Changelog.rst
+Once you have the actual changes, you need to convert them to RST format and add them to the [Changelog.rst](https://github.com/celery/celery/blob/main/Changelog.rst) file. The new version block needs to follow the following format:
+```rst
+.. _version-x.y.z:
+
+x.y.z
+=====
+
+:release-date: YYYY-MM-DD HH:MM P.M/A.M TimeZone
+:release-by: Release Manager Name
+
+Changes list in RST format.
+```
+
+These changes will be reflected in the [Change history](https://docs.celeryq.dev/en/stable/changelog.html) section of the documentation.
+
+### 3.2 Changelog PR
+The changes to the [Changelog.rst](https://github.com/celery/celery/blob/main/Changelog.rst) file should be submitted as a PR. This PR should be the last merged PR before the release.
+
+## 4. Release
+### 4.1 Prepare releasing environment
+Before moving forward with the release, the release manager should ensure that bumpversion and twine are installed. These are required to publish the release.
+
+### 4.2 Bump version
+The release manager should bump the version using the following command:
+```bash
+bumpversion patch
+```
+The changes should be pushed directly to main by the release manager.
+
+At this point, the git log should appear somewhat similar to this:
+```
+commit XXX (HEAD -> main, tag: vX.Y.Z, upstream/main, origin/main)
+Author: Release Manager
+Date: YYY
+
+    Bump version: a.b.c → x.y.z
+
+commit XXX
+Author: Release Manager
+Date: YYY
+
+    Added changelog for vX.Y.Z (#1234)
+```
+If everything looks good, the bump version commit can be directly pushed to `main`:
+```bash
+git push origin main --tags
+```
+
+### 4.3 Publish release to PyPI
+The release manager should publish the release to PyPI using the following commands running under the root directory of the repository:
+```bash
+python setup.py clean build sdist bdist_wheel
+```
+If the build is successful, the release manager should publish the release to PyPI using the following command:
+```bash
+twine upload dist/celery-X.Y.Z*
+```
+
+> ⚠️ **Warning:** The release manager should double check that the release details are correct (project/version) before publishing the release to PyPI.
+
+> ⚠️ **Critical Reminder:** Should the released package prove to be faulty or need retraction for any reason, do not delete it from PyPI. The appropriate course of action is to "yank" the release.
+
+## Release Announcement
+After the release is published, the release manager should create a new GitHub Release and set it as the latest release.
+
+CleanShot 2023-09-05 at 22 51 24@2x
+
+### Add Release Notes
+On a per-case basis, the release manager may also attach an additional release note to the auto-generated release notes.
This is usually done when there are important changes that are not reflected in the auto-generated release notes. + +### OpenCollective Update +After successfully publishing the new release, the release manager is responsible for announcing it on the project's OpenCollective [page](https://opencollective.com/celery/updates). This is to engage with the community and keep backers and sponsors in the loop. + + +# Release Blockers + \ No newline at end of file From 92d073821798a86e0dd8695e8ddd344ad33d44d0 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sun, 17 Sep 2023 22:15:50 +0300 Subject: [PATCH 0500/1051] Added Python 3.11 support to Dockerfile (#8511) --- docker/Dockerfile | 23 ++++++++++++++++++----- 1 file changed, 18 insertions(+), 5 deletions(-) diff --git a/docker/Dockerfile b/docker/Dockerfile index 66ca8a30a78..1bf839d18d5 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -62,9 +62,10 @@ RUN curl https://pyenv.run | bash RUN pyenv install 3.8 RUN pyenv install 3.9 RUN pyenv install 3.10 +RUN pyenv install 3.11 # Set global Python versions -RUN pyenv global 3.8 3.9 3.10 +RUN pyenv global 3.8 3.9 3.10 3.11 # Install celery WORKDIR $HOME @@ -73,17 +74,19 @@ COPY --chown=1000:1000 docker/entrypoint /entrypoint RUN chmod gu+x /entrypoint # Define the local pyenvs -RUN pyenv local 3.8 3.9 3.10 +RUN pyenv local 3.8 3.9 3.10 3.11 RUN pyenv exec python3.8 -m pip install --upgrade pip setuptools wheel && \ pyenv exec python3.9 -m pip install --upgrade pip setuptools wheel && \ - pyenv exec python3.10 -m pip install --upgrade pip setuptools wheel + pyenv exec python3.10 -m pip install --upgrade pip setuptools wheel && \ + pyenv exec python3.11 -m pip install --upgrade pip setuptools wheel COPY --chown=1000:1000 . $HOME/celery RUN pyenv exec python3.8 -m pip install -e $HOME/celery && \ pyenv exec python3.9 -m pip install -e $HOME/celery && \ - pyenv exec python3.10 -m pip install -e $HOME/celery + pyenv exec python3.10 -m pip install -e $HOME/celery && \ + pyenv exec python3.11 -m pip install -e $HOME/celery # Setup one celery environment for basic development use RUN pyenv exec python3.8 -m pip install \ @@ -115,7 +118,17 @@ RUN pyenv exec python3.8 -m pip install \ -r requirements/test-ci-default.txt \ -r requirements/test-integration.txt \ -r requirements/test-pypy3.txt \ - -r requirements/test.txt + -r requirements/test.txt && \ + pyenv exec python3.11 -m pip install \ + -r requirements/default.txt \ + -r requirements/dev.txt \ + -r requirements/docs.txt \ + -r requirements/pkgutils.txt \ + -r requirements/test-ci-base.txt \ + -r requirements/test-ci-default.txt \ + -r requirements/test-integration.txt \ + -r requirements/test-pypy3.txt \ + -r requirements/test.txt WORKDIR $HOME/celery From 270ee0d9f9af963e211819a26f01d2800d7264d3 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sun, 17 Sep 2023 21:34:55 +0300 Subject: [PATCH 0501/1051] Added .github/dependabot.yml --- .github/dependabot.yml | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 .github/dependabot.yml diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 00000000000..123014908be --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,6 @@ +version: 2 +updates: + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "daily" From d1dd9d4eaeb1fdcdb387ba350eafc08082c49525 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Sep 2023 11:20:26 +0000 Subject: [PATCH 0502/1051] Bump 
actions/checkout from 3 to 4 Bumps [actions/checkout](https://github.com/actions/checkout) from 3 to 4. - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/v3...v4) --- updated-dependencies: - dependency-name: actions/checkout dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- .github/workflows/codeql-analysis.yml | 2 +- .github/workflows/linter.yml | 2 +- .github/workflows/semgrep.yml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index a051d05bafc..65e0f6c8ca5 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -37,7 +37,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL diff --git a/.github/workflows/linter.yml b/.github/workflows/linter.yml index ac393f42798..31fa81f88cf 100644 --- a/.github/workflows/linter.yml +++ b/.github/workflows/linter.yml @@ -8,7 +8,7 @@ jobs: steps: - name: Checkout branch - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Run pre-commit uses: pre-commit/action@v3.0.0 diff --git a/.github/workflows/semgrep.yml b/.github/workflows/semgrep.yml index 88d6d45d5a4..1352b65ae16 100644 --- a/.github/workflows/semgrep.yml +++ b/.github/workflows/semgrep.yml @@ -19,5 +19,5 @@ jobs: container: image: returntocorp/semgrep steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - run: semgrep ci From 829915158e0a3bf301637395da6ad818c8acbf6d Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 18 Sep 2023 23:35:32 +0300 Subject: [PATCH 0503/1051] [pre-commit.ci] pre-commit autoupdate (#8515) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/asottile/pyupgrade: v3.10.1 → v3.11.0](https://github.com/asottile/pyupgrade/compare/v3.10.1...v3.11.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 8be176b4c41..25428b53f17 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/asottile/pyupgrade - rev: v3.10.1 + rev: v3.11.0 hooks: - id: pyupgrade args: ["--py38-plus"] From 06a378f2af52d4f44420427272067bde726c7006 Mon Sep 17 00:00:00 2001 From: Anup Mantri <829820+amantri@users.noreply.github.com> Date: Mon, 18 Sep 2023 13:46:10 -0700 Subject: [PATCH 0504/1051] Update ETA example to include timezone (#8516) The behavior when using timezone unaware datetime objects can be incorrect. For a Redis broker running on the same machine as my Celery queue, I got incorrect scheduling when using naive datetime objects. Setting the timezone explicitly fixed the issue. --- docs/userguide/calling.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/userguide/calling.rst b/docs/userguide/calling.rst index 40e0aeced08..b41db9e0d10 100644 --- a/docs/userguide/calling.rst +++ b/docs/userguide/calling.rst @@ -250,9 +250,9 @@ and timezone information): .. 
code-block:: pycon

-    >>> from datetime import datetime, timedelta
+    >>> from datetime import datetime, timedelta, timezone

-    >>> tomorrow = datetime.utcnow() + timedelta(days=1)
+    >>> tomorrow = datetime.now(timezone.utc) + timedelta(days=1)
     >>> add.apply_async((2, 2), eta=tomorrow)

 .. warning::
@@ -313,9 +313,9 @@ either as seconds after task publish, or a specific date and time using
     >>> add.apply_async((10, 10), expires=60)

     >>> # Also supports datetime
-    >>> from datetime import datetime, timedelta
+    >>> from datetime import datetime, timedelta, timezone
     >>> add.apply_async((10, 10), kwargs,
-    ...                 expires=datetime.now() + timedelta(days=1))
+    ...                 expires=datetime.now(timezone.utc) + timedelta(days=1))

 When a worker receives an expired task it will mark

From bbe8775508719ac87f3bfcb1eaf6642543c7c5ab Mon Sep 17 00:00:00 2001
From: Trenton H <797416+stumpylog@users.noreply.github.com>
Date: Tue, 19 Sep 2023 00:34:51 -0700
Subject: [PATCH 0505/1051] Replaces datetime.fromisoformat with the more lenient dateutil parser (#8507)

* Replaces datetime.fromisoformat with the more lenient dateutil parser

* Adds additional testing of maybe_iso8601

---
 .github/workflows/python-package.yml |  2 +-
 celery/app/base.py                   |  3 ++-
 celery/result.py                     |  3 ++-
 celery/utils/iso8601.py              |  2 +-
 celery/utils/time.py                 |  3 ++-
 t/unit/utils/test_time.py            | 12 ++++++++++++
 6 files changed, 20 insertions(+), 5 deletions(-)

diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml
index 2049fe37211..078c5a4fbb5 100644
--- a/.github/workflows/python-package.yml
+++ b/.github/workflows/python-package.yml
@@ -36,7 +36,7 @@ jobs:
             os: "windows-latest"
           - python-version: 'pypy-3.10'
             os: "windows-latest"
-
+
     steps:
diff --git a/celery/app/base.py b/celery/app/base.py
index fb78893ba2d..4846a913bf4 100644
--- a/celery/app/base.py
+++ b/celery/app/base.py
@@ -9,6 +9,7 @@
 from operator import attrgetter

 from click.exceptions import Exit
+from dateutil.parser import isoparse
 from kombu import pools
 from kombu.clocks import LamportClock
 from kombu.common import oid_from
@@ -740,7 +741,7 @@ def send_task(self, name, args=None, kwargs=None, countdown=None,
                 expires) - self.now()).total_seconds()
         elif isinstance(expires, str):
             expires_s = (maybe_make_aware(
-                datetime.fromisoformat(expires)) - self.now()).total_seconds()
+                isoparse(expires)) - self.now()).total_seconds()
         else:
             expires_s = expires

diff --git a/celery/result.py b/celery/result.py
index 0c9e0a30f21..065d9ca5158 100644
--- a/celery/result.py
+++ b/celery/result.py
@@ -6,6 +6,7 @@
 from contextlib import contextmanager
 from weakref import proxy

+from dateutil.parser import isoparse
 from kombu.utils.objects import cached_property
 from vine import Thenable, barrier, promise

@@ -532,7 +533,7 @@ def date_done(self):
         """UTC date and time."""
         date_done = self._get_task_meta().get('date_done')
         if date_done and not isinstance(date_done, datetime.datetime):
-            return datetime.datetime.fromisoformat(date_done)
+            return isoparse(date_done)
         return date_done

     @property
diff --git a/celery/utils/iso8601.py b/celery/utils/iso8601.py
index ffe342b40c8..74aff491a69 100644
--- a/celery/utils/iso8601.py
+++ b/celery/utils/iso8601.py
@@ -52,7 +52,7 @@
 def parse_iso8601(datestring):
     """Parse and convert ISO-8601 string to datetime."""
-    warn("parse_iso8601", "v5.3", "v6", "datetime.datetime.fromisoformat")
+    warn("parse_iso8601", "v5.3", "v6", "datetime.datetime.fromisoformat or dateutil.parser.isoparse")
     m =
ISO8601_REGEX.match(datestring) if not m: raise ValueError('unable to parse date string %r' % datestring) diff --git a/celery/utils/time.py b/celery/utils/time.py index f5329a5e39b..ba94d7951b1 100644 --- a/celery/utils/time.py +++ b/celery/utils/time.py @@ -14,6 +14,7 @@ from typing import Any, Callable from dateutil import tz as dateutil_tz +from dateutil.parser import isoparse from kombu.utils.functional import reprcall from kombu.utils.objects import cached_property @@ -288,7 +289,7 @@ def maybe_iso8601(dt: datetime | str | None) -> None | datetime: return if isinstance(dt, datetime): return dt - return datetime.fromisoformat(dt) + return isoparse(dt) def is_naive(dt: datetime) -> bool: diff --git a/t/unit/utils/test_time.py b/t/unit/utils/test_time.py index 9841f364c5a..80d5db973a1 100644 --- a/t/unit/utils/test_time.py +++ b/t/unit/utils/test_time.py @@ -101,6 +101,18 @@ def test_maybe_iso8601_datetime(): assert maybe_iso8601(now) is now +@pytest.mark.parametrize('date_str,expected', [ + ('2011-11-04T00:05:23', datetime(2011, 11, 4, 0, 5, 23)), + ('2011-11-04T00:05:23Z', datetime(2011, 11, 4, 0, 5, 23, tzinfo=_timezone.utc)), + ('2011-11-04 00:05:23.283+00:00', + datetime(2011, 11, 4, 0, 5, 23, 283000, tzinfo=_timezone.utc)), + ('2011-11-04T00:05:23+04:00', + datetime(2011, 11, 4, 0, 5, 23, tzinfo=_timezone(timedelta(seconds=14400)))), +]) +def test_iso8601_string_datetime(date_str, expected): + assert maybe_iso8601(date_str) == expected + + @pytest.mark.parametrize('arg,expected', [ (30, timedelta(seconds=30)), (30.6, timedelta(seconds=30.6)), From dfe2e919c62747aaf1ba17ebfd066256e26de459 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 21 Sep 2023 01:20:25 +0300 Subject: [PATCH 0506/1051] Fixed indentation in Dockerfile for Python 3.11 (#8527) --- docker/Dockerfile | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/docker/Dockerfile b/docker/Dockerfile index 1bf839d18d5..7d469686073 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -119,16 +119,16 @@ RUN pyenv exec python3.8 -m pip install \ -r requirements/test-integration.txt \ -r requirements/test-pypy3.txt \ -r requirements/test.txt && \ - pyenv exec python3.11 -m pip install \ - -r requirements/default.txt \ - -r requirements/dev.txt \ - -r requirements/docs.txt \ - -r requirements/pkgutils.txt \ - -r requirements/test-ci-base.txt \ - -r requirements/test-ci-default.txt \ - -r requirements/test-integration.txt \ - -r requirements/test-pypy3.txt \ - -r requirements/test.txt + pyenv exec python3.11 -m pip install \ + -r requirements/default.txt \ + -r requirements/dev.txt \ + -r requirements/docs.txt \ + -r requirements/pkgutils.txt \ + -r requirements/test-ci-base.txt \ + -r requirements/test-ci-default.txt \ + -r requirements/test-integration.txt \ + -r requirements/test-pypy3.txt \ + -r requirements/test.txt WORKDIR $HOME/celery From 4089d564a8f4ce48a8d4dfc33865f8856bd957d2 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 21 Sep 2023 02:50:31 +0300 Subject: [PATCH 0507/1051] Fix git bug in Dockerfile (#8528) --- docker/Dockerfile | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docker/Dockerfile b/docker/Dockerfile index 7d469686073..8afdccaa859 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -132,6 +132,8 @@ RUN pyenv exec python3.8 -m pip install \ WORKDIR $HOME/celery +RUN git config --global --add safe.directory /home/developer/celery + # Setup the entrypoint, this ensures pyenv is initialized when a container is started # and that any compiled files 
from earlier steps or from mounts are removed to avoid
# pytest failing with an ImportMismatchError

From 99b000d9640856eed01a6535318b884282a9e64d Mon Sep 17 00:00:00 2001
From: Tomer Nosrati
Date: Thu, 21 Sep 2023 01:06:59 +0300
Subject: [PATCH 0508/1051] Changed tox run for lint,apicheck,linkcheck,configcheck,bandit from Python 3.9 to Python 3.11

---
 tox.ini | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tox.ini b/tox.ini
index 59d3676d1e3..96a0d4d24a2 100644
--- a/tox.ini
+++ b/tox.ini
@@ -81,7 +81,7 @@ basepython =
     3.11: python3.11
     pypy3: pypy3
     mypy: python3.8
-    lint,apicheck,linkcheck,configcheck,bandit: python3.9
+    lint,apicheck,linkcheck,configcheck,bandit: python3.11
 usedevelop = True

 [testenv:mypy]

From 61b763b62dc5f846452986cdba3ffac75c5b3014 Mon Sep 17 00:00:00 2001
From: Daniel M
Date: Wed, 20 Sep 2023 23:20:57 -0400
Subject: [PATCH 0509/1051] Document gevent concurrency (#8520)

* Document gevent concurrency

* Add known issues

* Update docs/userguide/concurrency/gevent.rst

Co-authored-by: Asif Saif Uddin

* Update docs/userguide/concurrency/gevent.rst

* Update docs/userguide/concurrency/gevent.rst

* Update docs/userguide/concurrency/gevent.rst

* Update examples/gevent/README.rst

---------

Co-authored-by: Omer Katz
Co-authored-by: Asif Saif Uddin

---
 docs/userguide/concurrency/gevent.rst | 79 +++++++++++++++++++++++++++
 docs/userguide/concurrency/index.rst  |  1 +
 examples/gevent/README.rst            | 51 +++++++++++++++++
 3 files changed, 131 insertions(+)
 create mode 100644 docs/userguide/concurrency/gevent.rst
 create mode 100644 examples/gevent/README.rst

diff --git a/docs/userguide/concurrency/gevent.rst b/docs/userguide/concurrency/gevent.rst
new file mode 100644
index 00000000000..7ec8eca414e
--- /dev/null
+++ b/docs/userguide/concurrency/gevent.rst
@@ -0,0 +1,79 @@
+.. _concurrency-gevent:
+
+===========================
+ Concurrency with gevent
+===========================
+
+.. _gevent-introduction:
+
+Introduction
+============
+
+The `gevent`_ homepage describes it as a coroutine_-based Python_ networking library that uses
+`greenlet `_ to provide a high-level synchronous API on top of the `libev`_
+or `libuv`_ event loop.
+
+Features include:
+
+* Fast event loop based on `libev`_ or `libuv`_.
+* Lightweight execution units based on greenlets.
+* API that re-uses concepts from the Python standard library (for
+  example, there are `events`_ and
+  `queues`_).
+* `Cooperative sockets with SSL support `_
+* `Cooperative DNS queries `_ performed through a threadpool,
+  dnspython, or c-ares.
+* `Monkey patching utility `_ to get 3rd party modules to become cooperative
+* TCP/UDP/HTTP servers
+* Subprocess support (through `gevent.subprocess`_)
+* Thread pools
+
+gevent is `inspired by eventlet`_ but features a more consistent API,
+simpler implementation and better performance. Read why others `use
+gevent`_ and check out the list of the `open source projects based on
+gevent`_.
+
+
+Enabling gevent
+=================
+
+You can enable the gevent pool by using the
+:option:`celery worker -P gevent` or :option:`celery worker --pool=gevent`
+worker option.
+
+.. code-block:: console
+
+    $ celery -A proj worker -P gevent -c 1000
+
+.. _gevent-examples:
+
+Examples
+========
+
+See the `gevent examples`_ directory in the Celery distribution for
+some examples making use of gevent support.
+
+Known issues
+============
+There is a known issue using Python 3.11 and gevent.
+The issue is documented `here`_ and addressed in a `gevent issue`_.
+Upgrading to greenlet 3.0 solves it.
+
+.. _events: http://www.gevent.org/api/gevent.event.html#gevent.event.Event
+.. _queues: http://www.gevent.org/api/gevent.queue.html#gevent.queue.Queue
+.. _`gevent`: http://www.gevent.org/
+.. _`gevent examples`:
+    https://github.com/celery/celery/tree/main/examples/gevent
+.. _gevent.subprocess: http://www.gevent.org/api/gevent.subprocess.html#module-gevent.subprocess
+
+.. _coroutine: https://en.wikipedia.org/wiki/Coroutine
+.. _Python: http://python.org
+.. _libev: http://software.schmorp.de/pkg/libev.html
+.. _libuv: http://libuv.org
+.. _inspired by eventlet: http://blog.gevent.org/2010/02/27/why-gevent/
+.. _use gevent: http://groups.google.com/group/gevent/browse_thread/thread/4de9703e5dca8271
+.. _open source projects based on gevent: https://github.com/gevent/gevent/wiki/Projects
+.. _what's new: http://www.gevent.org/whatsnew_1_5.html
+.. _changelog: http://www.gevent.org/changelog.html
+.. _here: https://github.com/celery/celery/issues/8425
+.. _gevent issue: https://github.com/gevent/gevent/issues/1985
diff --git a/docs/userguide/concurrency/index.rst b/docs/userguide/concurrency/index.rst
index 4bdf54b202d..75faac8e98d 100644
--- a/docs/userguide/concurrency/index.rst
+++ b/docs/userguide/concurrency/index.rst
@@ -11,3 +11,4 @@
     :maxdepth: 2

     eventlet
+    gevent
diff --git a/examples/gevent/README.rst b/examples/gevent/README.rst
new file mode 100644
index 00000000000..8ef429ec8a1
--- /dev/null
+++ b/examples/gevent/README.rst
@@ -0,0 +1,51 @@
+==================================
+ Example using the gevent Pool
+==================================
+
+Introduction
+============
+
+This is a Celery application containing two example tasks.
+
+First you need to install gevent::
+
+    $ python -m pip install gevent celery pybloom-live
+
+Before you run any of the example tasks you need to start
+the worker::
+
+    $ cd examples/gevent
+    $ celery worker -l INFO --concurrency=500 --pool=gevent
+
+As usual you need to have RabbitMQ running, see the Celery getting started
+guide if you haven't installed it yet.
+
+Tasks
+=====
+
+* `tasks.urlopen`
+
+This task simply makes a request opening the URL and returns the size
+of the response body::
+
+    $ cd examples/gevent
+    $ python
+    >>> from tasks import urlopen
+    >>> urlopen.delay('https://www.google.com/').get()
+    9980
+
+To open several URLs at once you can do::
+
+    $ cd examples/gevent
+    $ python
+    >>> from tasks import urlopen
+    >>> from celery import group
+    >>> result = group(urlopen.s(url)
+    ...                 for url in LIST_OF_URLS).apply_async()
+    >>> for incoming_result in result.iter_native():
+    ...     print(incoming_result)
+
+* `webcrawler.crawl`
+
+This is a simple recursive web crawler. It will only crawl
+URLs for the current host name. Please see comments in the
+`webcrawler.py` file.
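[Editor's note] As a hedged illustration of the task module the README above refers to, a minimal ``tasks.py`` for this example could look like the sketch below. The module name matches the README's ``from tasks import urlopen``, but the broker URL and the use of ``urllib.request`` are assumptions for illustration, not necessarily the repository's actual file::

    # Minimal sketch of a tasks.py compatible with the gevent example README.
    # The broker URL is an assumption; point it at your own RabbitMQ instance.
    import urllib.request

    from celery import Celery

    app = Celery('tasks', broker='amqp://guest@localhost//')


    @app.task
    def urlopen(url):
        """Fetch ``url`` and return the size of the response body."""
        try:
            response = urllib.request.urlopen(url)
        except Exception as exc:
            print(f'url {url} gave error: {exc!r}')
            return None
        return len(response.read())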
From 65ac2ac45deda39d7a05535d5f8489f09eaa3c5d Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Thu, 21 Sep 2023 11:04:55 +0600
Subject: [PATCH 0510/1051] Update test.txt (#8530)

---
 requirements/test.txt | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/requirements/test.txt b/requirements/test.txt
index 1d02f983aa9..0900248ada6 100644
--- a/requirements/test.txt
+++ b/requirements/test.txt
@@ -1,4 +1,4 @@
-pytest==7.4.1
+pytest==7.4.2
 pytest-celery==0.0.0
 pytest-subtests==0.11.0
 pytest-timeout==2.1.0
@@ -8,7 +8,7 @@ boto3>=1.26.143
 moto>=4.1.11
 # typing extensions
 mypy==1.5.1; platform_python_implementation=="CPython"
-pre-commit==3.3.3
+pre-commit==3.4.0
 -r extras/yaml.txt
 -r extras/msgpack.txt
 -r extras/mongodb.txt

From 20b396d6e02c0b91f2e4663d0cd2355f76799c5e Mon Sep 17 00:00:00 2001
From: Tomer Nosrati
Date: Thu, 21 Sep 2023 13:53:47 +0300
Subject: [PATCH 0511/1051] Celery Docker Upgrades (#8531)

* Added -docker environment to tox envlist to allow running integration tests via the docker compose broker and backend containers

* Set default python for celery docker container to 3.11 from 3.8

* Added make commands: docker-build, docker-lint, docker-unit-tests, docker-integration-tests (partially supported), docker-bash

* Added new Docker CI Workflow to validate the docker image is built correctly

* No-op code change to trigger full CI

---
 .github/workflows/docker.yml | 29 +++++++++++++++++++++++++++++
 Makefile                     | 33 +++++++++++++++++++++++++++++++++
 docker/Dockerfile            |  2 +-
 t/integration/test_canvas.py |  2 +-
 tox.ini                      |  5 ++++-
 5 files changed, 68 insertions(+), 3 deletions(-)
 create mode 100644 .github/workflows/docker.yml

diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml
new file mode 100644
index 00000000000..6f7319c2bca
--- /dev/null
+++ b/.github/workflows/docker.yml
@@ -0,0 +1,29 @@
+name: Docker
+
+on:
+  push:
+    branches: [ 'main']
+    paths:
+      - '**.py'
+      - '**.txt'
+      - '**.toml'
+      - './docker/**'
+      - '.github/workflows/docker.yml'
+  pull_request:
+    branches: [ 'main']
+    paths:
+      - '**.py'
+      - '**.txt'
+      - '**.toml'
+      - './docker/**'
+      - '.github/workflows/docker.yml'
+
+
+jobs:
+  docker-build:
+    runs-on: ubuntu-latest
+    timeout-minutes: 30
+    steps:
+      - uses: actions/checkout@v4
+      - name: Build Docker container
+        run: make docker-build
\ No newline at end of file
diff --git a/Makefile b/Makefile
index 4b64f228e5d..858b4fabfdd 100644
--- a/Makefile
+++ b/Makefile
@@ -53,6 +53,12 @@ help:
	@echo "bump-minor               - Bump minor version number."
	@echo "bump-major               - Bump major version number."
	@echo "release                  - Make PyPI release."
+	@echo ""
+	@echo "Docker-specific commands:"
+	@echo "  docker-build             - Build celery docker container."
+	@echo "  docker-lint              - Run tox -e lint on docker container."
+	@echo "  docker-unit-tests        - Run unit tests on docker container, use '-- -k <TEST NAME>' for specific test run."
+	@echo "  docker-bash              - Get a bash shell inside the container."
clean: clean-docs clean-pyc clean-build @@ -167,3 +173,30 @@ graph: clean-graph $(WORKER_GRAPH) authorcheck: git shortlog -se | cut -f2 | extra/release/attribution.py + +.PHONY: docker-build +docker-build: + @docker-compose -f docker/docker-compose.yml build + +.PHONY: docker-lint +docker-lint: + @docker-compose -f docker/docker-compose.yml run --rm -w /home/developer/celery celery tox -e lint + +.PHONY: docker-unit-tests +docker-unit-tests: + @docker-compose -f docker/docker-compose.yml run --rm -w /home/developer/celery celery tox -e 3.11-unit -- $(filter-out $@,$(MAKECMDGOALS)) + +# Integration tests are not fully supported when running in a docker container yet so we allow them to +# gracefully fail until fully supported. +# TODO: Add documentation (in help command) when fully supported. +.PHONY: docker-integration-tests +docker-integration-tests: + @docker-compose -f docker/docker-compose.yml run --rm -w /home/developer/celery celery tox -e 3.11-integration-docker -- --maxfail=1000 + +.PHONY: docker-bash +docker-bash: + @docker-compose -f docker/docker-compose.yml run --rm -w /home/developer/celery celery bash + +.PHONY: catch-all +%: catch-all + @: diff --git a/docker/Dockerfile b/docker/Dockerfile index 8afdccaa859..ddda214a38c 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -74,7 +74,7 @@ COPY --chown=1000:1000 docker/entrypoint /entrypoint RUN chmod gu+x /entrypoint # Define the local pyenvs -RUN pyenv local 3.8 3.9 3.10 3.11 +RUN pyenv local 3.11 3.10 3.9 3.8 RUN pyenv exec python3.8 -m pip install --upgrade pip setuptools wheel && \ pyenv exec python3.9 -m pip install --upgrade pip setuptools wheel && \ diff --git a/t/integration/test_canvas.py b/t/integration/test_canvas.py index 6cec87c68cf..5673c5e60c2 100644 --- a/t/integration/test_canvas.py +++ b/t/integration/test_canvas.py @@ -3500,6 +3500,6 @@ def on_signature(self, sig, **headers) -> dict: canvas.options["link_error"] = dict(fail.si()) canvas.stamp(visitor=CustomStampingVisitor()) - with subtests.test(msg='Expect canvas to fail'): + with subtests.test(msg="Expect canvas to fail"): with pytest.raises(ExpectedException): canvas.apply_async().get(timeout=TIMEOUT) diff --git a/tox.ini b/tox.ini index 96a0d4d24a2..0b82e2d3ec0 100644 --- a/tox.ini +++ b/tox.ini @@ -3,7 +3,7 @@ requires = tox-gh-actions envlist = {3.8,3.9,3.10,3.11,pypy3}-unit - {3.8,3.9,3.10,3.11,pypy3}-integration-{rabbitmq_redis,rabbitmq,redis,dynamodb,azureblockblob,cache,cassandra,elasticsearch} + {3.8,3.9,3.10,3.11,pypy3}-integration-{rabbitmq_redis,rabbitmq,redis,dynamodb,azureblockblob,cache,cassandra,elasticsearch,docker} flake8 apicheck @@ -66,6 +66,9 @@ setenv = rabbitmq_redis: TEST_BROKER=pyamqp:// rabbitmq_redis: TEST_BACKEND=redis:// + docker: TEST_BROKER=pyamqp://rabbit:5672 + docker: TEST_BACKEND=redis://redis + dynamodb: TEST_BROKER=redis:// dynamodb: TEST_BACKEND=dynamodb://@localhost:8000 dynamodb: AWS_ACCESS_KEY_ID=test_aws_key_id From 1683008881717d2f8391264cb2b6177d85ff5ea8 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 25 Sep 2023 18:16:06 +0300 Subject: [PATCH 0512/1051] pyupgrade upgrade v3.11.0 -> v3.13.0 (#8535) * pyupgrade upgrade v3.11.0 -> v3.13.0 * pre-commit auto fixes --- .pre-commit-config.yaml | 2 +- celery/platforms.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 25428b53f17..1a258458959 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/asottile/pyupgrade - 
rev: v3.11.0
+    rev: v3.13.0
     hooks:
       - id: pyupgrade
         args: ["--py38-plus"]

diff --git a/celery/platforms.py b/celery/platforms.py
index f424ac37ab4..6203f2c29b5 100644
--- a/celery/platforms.py
+++ b/celery/platforms.py
@@ -189,7 +189,7 @@ def remove_if_stale(self):
         try:
             os.kill(pid, 0)
-        except os.error as exc:
+        except OSError as exc:
             if exc.errno == errno.ESRCH or exc.errno == errno.EPERM:
                 print('Stale pidfile exists - Removing it.', file=sys.stderr)
                 self.remove()

From 761b99d3cb93d134908afba66f81aca6f4b242d6 Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Sun, 1 Oct 2023 21:34:25 +0600
Subject: [PATCH 0513/1051] Update msgpack.txt (#8548)

---
 requirements/extras/msgpack.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements/extras/msgpack.txt b/requirements/extras/msgpack.txt
index e0ee0a59187..350d3c7790d 100644
--- a/requirements/extras/msgpack.txt
+++ b/requirements/extras/msgpack.txt
@@ -1 +1 @@
-msgpack==1.0.5
+msgpack==1.0.6

From fb0951866c2e388ea5a13822b3afde604323e7ef Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Sun, 1 Oct 2023 21:50:03 +0600
Subject: [PATCH 0514/1051] Update auth.txt (#8547)

---
 requirements/extras/auth.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements/extras/auth.txt b/requirements/extras/auth.txt
index 988a9e635d9..7e668341b53 100644
--- a/requirements/extras/auth.txt
+++ b/requirements/extras/auth.txt
@@ -1 +1 @@
-cryptography==41.0.3
+cryptography==41.0.4

From 53f300022c4abb8f05c899618e6f44038f088cc8 Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Tue, 3 Oct 2023 20:02:47 +0600
Subject: [PATCH 0515/1051] Update msgpack.txt to fix build issues (#8552)

---
 requirements/extras/msgpack.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements/extras/msgpack.txt b/requirements/extras/msgpack.txt
index 350d3c7790d..990f76ab16b 100644
--- a/requirements/extras/msgpack.txt
+++ b/requirements/extras/msgpack.txt
@@ -1 +1 @@
-msgpack==1.0.6
+msgpack==1.0.7

From 14892abbb8cf80d7abcf41f4a48c049d84f69f74 Mon Sep 17 00:00:00 2001
From: "Justin@Q2" <109311040+q2justin@users.noreply.github.com>
Date: Wed, 4 Oct 2023 12:32:01 -0400
Subject: [PATCH 0516/1051] Basic ElasticSearch / ElasticClient 8.x Support (#8519)

* Basic ElasticSearch / ElasticClient 8.x Support
* 7.x and 8.x both support the scheme being in the URI (see the sketch
  after this list); removed the if-check for client-side version 7.x or
  below, as specifying the scheme in the URI works like it does with the
  8.x client.
* Relax elasticsearch version requirements: support up to the 8.9 python
  client. The elasticsearch 7.x client does not work with an
  elasticsearch 8.x server, and vice versa.
* Update requirements/extras/elasticsearch.txt
* Update requirements/extras/elasticsearch.txt
* Fixed tests and exceptions
* Update requirements/extras/elasticsearch.txt
* Fixed linting issues
* Update requirements/extras/elasticsearch.txt
* Update requirements/extras/elasticsearch.txt
* Added with_doctype versions of selected tests to help improve test
  coverage.
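A short sketch of the scheme-in-the-URI construction this patch standardizes
on; the URL and retry values below are illustrative placeholders, not part of
the patch:

    from elasticsearch import Elasticsearch

    # With the modern 7.x/8.x clients the scheme travels inside the node
    # URL itself, so the backend no longer passes a separate scheme= kwarg.
    client = Elasticsearch(
        'https://localhost:9200',  # scheme, host and port in one URI
        retry_on_timeout=True,     # illustrative retry settings
        max_retries=3,
    )
    print(client.ping())  # True if the node is reachable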
--------- Co-authored-by: Asif Saif Uddin --- celery/backends/elasticsearch.py | 95 ++++++++---- requirements/extras/elasticsearch.txt | 3 +- t/unit/backends/test_elasticsearch.py | 202 +++++++++++++++++++------- 3 files changed, 220 insertions(+), 80 deletions(-) diff --git a/celery/backends/elasticsearch.py b/celery/backends/elasticsearch.py index 544812979c5..cb4ca4da0fd 100644 --- a/celery/backends/elasticsearch.py +++ b/celery/backends/elasticsearch.py @@ -14,6 +14,11 @@ except ImportError: elasticsearch = None +try: + import elastic_transport +except ImportError: + elastic_transport = None + __all__ = ('ElasticsearchBackend',) E_LIB_MISSING = """\ @@ -31,7 +36,7 @@ class ElasticsearchBackend(KeyValueStoreBackend): """ index = 'celery' - doc_type = 'backend' + doc_type = None scheme = 'http' host = 'localhost' port = 9200 @@ -83,17 +88,17 @@ def __init__(self, url=None, *args, **kwargs): self._server = None def exception_safe_to_retry(self, exc): - if isinstance(exc, (elasticsearch.exceptions.TransportError)): + if isinstance(exc, elasticsearch.exceptions.ApiError): # 401: Unauthorized # 409: Conflict - # 429: Too Many Requests # 500: Internal Server Error # 502: Bad Gateway - # 503: Service Unavailable # 504: Gateway Timeout # N/A: Low level exception (i.e. socket exception) - if exc.status_code in {401, 409, 429, 500, 502, 503, 504, 'N/A'}: + if exc.status_code in {401, 409, 500, 502, 504, 'N/A'}: return True + if isinstance(exc , elasticsearch.exceptions.TransportError): + return True return False def get(self, key): @@ -108,11 +113,17 @@ def get(self, key): pass def _get(self, key): - return self.server.get( - index=self.index, - doc_type=self.doc_type, - id=key, - ) + if self.doc_type: + return self.server.get( + index=self.index, + id=key, + doc_type=self.doc_type, + ) + else: + return self.server.get( + index=self.index, + id=key, + ) def _set_with_state(self, key, value, state): body = { @@ -135,14 +146,23 @@ def set(self, key, value): def _index(self, id, body, **kwargs): body = {bytes_to_str(k): v for k, v in body.items()} - return self.server.index( - id=bytes_to_str(id), - index=self.index, - doc_type=self.doc_type, - body=body, - params={'op_type': 'create'}, - **kwargs - ) + if self.doc_type: + return self.server.index( + id=bytes_to_str(id), + index=self.index, + doc_type=self.doc_type, + body=body, + params={'op_type': 'create'}, + **kwargs + ) + else: + return self.server.index( + id=bytes_to_str(id), + index=self.index, + body=body, + params={'op_type': 'create'}, + **kwargs + ) def _update(self, id, body, state, **kwargs): """Update state in a conflict free manner. 
@@ -182,19 +202,32 @@ def _update(self, id, body, state, **kwargs): prim_term = res_get.get('_primary_term', 1) # try to update document with current seq_no and primary_term - res = self.server.update( - id=bytes_to_str(id), - index=self.index, - doc_type=self.doc_type, - body={'doc': body}, - params={'if_primary_term': prim_term, 'if_seq_no': seq_no}, - **kwargs - ) + if self.doc_type: + res = self.server.update( + id=bytes_to_str(id), + index=self.index, + doc_type=self.doc_type, + body={'doc': body}, + params={'if_primary_term': prim_term, 'if_seq_no': seq_no}, + **kwargs + ) + else: + res = self.server.update( + id=bytes_to_str(id), + index=self.index, + body={'doc': body}, + params={'if_primary_term': prim_term, 'if_seq_no': seq_no}, + **kwargs + ) # result is elastic search update query result # noop = query did not update any document # updated = at least one document got updated if res['result'] == 'noop': - raise elasticsearch.exceptions.ConflictError(409, 'conflicting update occurred concurrently', {}) + raise elasticsearch.exceptions.ConflictError( + "conflicting update occurred concurrently", + elastic_transport.ApiResponseMeta(409, "HTTP/1.1", + elastic_transport.HttpHeaders(), 0, elastic_transport.NodeConfig( + self.scheme, self.host, self.port)), None) return res def encode(self, data): @@ -225,7 +258,10 @@ def mget(self, keys): return [self.get(key) for key in keys] def delete(self, key): - self.server.delete(index=self.index, doc_type=self.doc_type, id=key) + if self.doc_type: + self.server.delete(index=self.index, id=key, doc_type=self.doc_type) + else: + self.server.delete(index=self.index, id=key) def _get_server(self): """Connect to the Elasticsearch server.""" @@ -233,11 +269,10 @@ def _get_server(self): if self.username and self.password: http_auth = (self.username, self.password) return elasticsearch.Elasticsearch( - f'{self.host}:{self.port}', + f'{self.scheme}://{self.host}:{self.port}', retry_on_timeout=self.es_retry_on_timeout, max_retries=self.es_max_retries, timeout=self.es_timeout, - scheme=self.scheme, http_auth=http_auth, ) diff --git a/requirements/extras/elasticsearch.txt b/requirements/extras/elasticsearch.txt index 79b70ac0eb7..3ae47451b5f 100644 --- a/requirements/extras/elasticsearch.txt +++ b/requirements/extras/elasticsearch.txt @@ -1 +1,2 @@ -elasticsearch<8.0 +elasticsearch<=8.10.0 +elastic-transport==8.4.1 diff --git a/t/unit/backends/test_elasticsearch.py b/t/unit/backends/test_elasticsearch.py index 45f8a6fb092..a53fe512984 100644 --- a/t/unit/backends/test_elasticsearch.py +++ b/t/unit/backends/test_elasticsearch.py @@ -12,6 +12,13 @@ except ImportError: exceptions = None +try: + from elastic_transport import ApiResponseMeta, HttpHeaders, NodeConfig +except ImportError: + ApiResponseMeta = None + HttpHeaders = None + NodeConfig = None + from celery.app import backends from celery.backends import elasticsearch as module from celery.backends.elasticsearch import ElasticsearchBackend @@ -53,11 +60,27 @@ def test_get(self): assert dict_result == sentinel.result x._server.get.assert_called_once_with( - doc_type=x.doc_type, id=sentinel.task_id, index=x.index, ) + def test_get_with_doctype(self): + x = ElasticsearchBackend(app=self.app) + x._server = Mock() + x._server.get = Mock() + # expected result + x.doc_type = "_doc" + r = {'found': True, '_source': {'result': sentinel.result}} + x._server.get.return_value = r + dict_result = x.get(sentinel.task_id) + + assert dict_result == sentinel.result + x._server.get.assert_called_once_with( + 
id=sentinel.task_id, + index=x.index, + doc_type=x.doc_type, + ) + def test_get_none(self): x = ElasticsearchBackend(app=self.app) x._server = Mock() @@ -67,7 +90,6 @@ def test_get_none(self): assert none_result is None x._server.get.assert_called_once_with( - doc_type=x.doc_type, id=sentinel.task_id, index=x.index, ) @@ -76,7 +98,9 @@ def test_get_task_not_found(self): x = ElasticsearchBackend(app=self.app) x._server = Mock() x._server.get.side_effect = [ - exceptions.NotFoundError(404, '{"_index":"celery","_type":"_doc","_id":"toto","found":false}', + exceptions.NotFoundError('{"_index":"celery","_type":"_doc","_id":"toto","found":false}', + ApiResponseMeta(404, "HTTP/1.1", HttpHeaders(), 0, + NodeConfig("https", "localhost", 9200)), {'_index': 'celery', '_type': '_doc', '_id': 'toto', 'found': False}) ] @@ -101,11 +125,23 @@ def test_delete(self): assert x.delete(sentinel.task_id) is None x._server.delete.assert_called_once_with( - doc_type=x.doc_type, id=sentinel.task_id, index=x.index, ) + def test_delete_with_doctype(self): + x = ElasticsearchBackend(app=self.app) + x._server = Mock() + x._server.delete = Mock() + x._server.delete.return_value = sentinel.result + x.doc_type = "_doc" + assert x.delete(sentinel.task_id) is None + x._server.delete.assert_called_once_with( + id=sentinel.task_id, + index=x.index, + doc_type=x.doc_type, + ) + def test_backend_by_url(self, url='elasticsearch://localhost:9200/index'): backend, url_ = backends.by_url(url, self.app.loader) @@ -120,7 +156,9 @@ def test_index_conflict(self, datetime_mock): x = ElasticsearchBackend(app=self.app) x._server = Mock() x._server.index.side_effect = [ - exceptions.ConflictError(409, "concurrent update", {}) + exceptions.ConflictError("concurrent update", + ApiResponseMeta(409, "HTTP/1.1", HttpHeaders(), 0, + NodeConfig("https", "localhost", 9200)), None) ] x._server.get.return_value = { @@ -136,6 +174,46 @@ def test_index_conflict(self, datetime_mock): x._set_with_state(sentinel.task_id, sentinel.result, sentinel.state) + assert x._server.get.call_count == 1 + x._server.index.assert_called_once_with( + id=sentinel.task_id, + index=x.index, + body={'result': sentinel.result, '@timestamp': expected_dt.isoformat()[:-3] + 'Z'}, + params={'op_type': 'create'}, + ) + x._server.update.assert_called_once_with( + id=sentinel.task_id, + index=x.index, + body={'doc': {'result': sentinel.result, '@timestamp': expected_dt.isoformat()[:-3] + 'Z'}}, + params={'if_seq_no': 2, 'if_primary_term': 1} + ) + + @patch('celery.backends.elasticsearch.datetime') + def test_index_conflict_with_doctype(self, datetime_mock): + expected_dt = datetime.datetime(2020, 6, 1, 18, 43, 24, 123456, None) + datetime_mock.utcnow.return_value = expected_dt + + x = ElasticsearchBackend(app=self.app) + x._server = Mock() + x._server.index.side_effect = [ + exceptions.ConflictError("concurrent update", + ApiResponseMeta(409, "HTTP/1.1", HttpHeaders(), 0, + NodeConfig("https", "localhost", 9200)), None) + ] + x.doc_type = "_doc" + x._server.get.return_value = { + 'found': True, + '_source': {"result": _RESULT_RETRY}, + '_seq_no': 2, + '_primary_term': 1, + } + + x._server.update.return_value = { + 'result': 'updated' + } + + x._set_with_state(sentinel.task_id, sentinel.result, sentinel.state) + assert x._server.get.call_count == 1 x._server.index.assert_called_once_with( id=sentinel.task_id, @@ -160,7 +238,9 @@ def test_index_conflict_without_state(self, datetime_mock): x = ElasticsearchBackend(app=self.app) x._server = Mock() x._server.index.side_effect = [ - 
exceptions.ConflictError(409, "concurrent update", {}) + exceptions.ConflictError("concurrent update", + ApiResponseMeta(409, "HTTP/1.1", HttpHeaders(), 0, + NodeConfig("https", "localhost", 9200)), None) ] x._server.get.return_value = { @@ -180,14 +260,12 @@ def test_index_conflict_without_state(self, datetime_mock): x._server.index.assert_called_once_with( id=sentinel.task_id, index=x.index, - doc_type=x.doc_type, body={'result': sentinel.result, '@timestamp': expected_dt.isoformat()[:-3] + 'Z'}, params={'op_type': 'create'}, ) x._server.update.assert_called_once_with( id=sentinel.task_id, index=x.index, - doc_type=x.doc_type, body={'doc': {'result': sentinel.result, '@timestamp': expected_dt.isoformat()[:-3] + 'Z'}}, params={'if_seq_no': 2, 'if_primary_term': 1} ) @@ -205,7 +283,9 @@ def test_index_conflict_with_ready_state_on_backend_without_state(self, datetime x = ElasticsearchBackend(app=self.app) x._server = Mock() x._server.index.side_effect = [ - exceptions.ConflictError(409, "concurrent update", {}) + exceptions.ConflictError("concurrent update", + ApiResponseMeta(409, "HTTP/1.1", HttpHeaders(), 0, + NodeConfig("https", "localhost", 9200)), None) ] x._server.get.return_value = { @@ -225,14 +305,12 @@ def test_index_conflict_with_ready_state_on_backend_without_state(self, datetime x._server.index.assert_called_once_with( id=sentinel.task_id, index=x.index, - doc_type=x.doc_type, body={'result': sentinel.result, '@timestamp': expected_dt.isoformat()[:-3] + 'Z'}, params={'op_type': 'create'}, ) x._server.update.assert_called_once_with( id=sentinel.task_id, index=x.index, - doc_type=x.doc_type, body={'doc': {'result': sentinel.result, '@timestamp': expected_dt.isoformat()[:-3] + 'Z'}}, params={'if_seq_no': 2, 'if_primary_term': 1} ) @@ -245,7 +323,9 @@ def test_index_conflict_with_existing_success(self, datetime_mock): x = ElasticsearchBackend(app=self.app) x._server = Mock() x._server.index.side_effect = [ - exceptions.ConflictError(409, "concurrent update", {}) + exceptions.ConflictError("concurrent update", + ApiResponseMeta(409, "HTTP/1.1", HttpHeaders(), 0, + NodeConfig("https", "localhost", 9200)), None) ] x._server.get.return_value = { @@ -267,7 +347,6 @@ def test_index_conflict_with_existing_success(self, datetime_mock): x._server.index.assert_called_once_with( id=sentinel.task_id, index=x.index, - doc_type=x.doc_type, body={'result': sentinel.result, '@timestamp': expected_dt.isoformat()[:-3] + 'Z'}, params={'op_type': 'create'}, ) @@ -281,7 +360,9 @@ def test_index_conflict_with_existing_ready_state(self, datetime_mock): x = ElasticsearchBackend(app=self.app) x._server = Mock() x._server.index.side_effect = [ - exceptions.ConflictError(409, "concurrent update", {}) + exceptions.ConflictError("concurrent update", + ApiResponseMeta(409, "HTTP/1.1", HttpHeaders(), 0, + NodeConfig("https", "localhost", 9200)), None) ] x._server.get.return_value = { @@ -301,7 +382,6 @@ def test_index_conflict_with_existing_ready_state(self, datetime_mock): x._server.index.assert_called_once_with( id=sentinel.task_id, index=x.index, - doc_type=x.doc_type, body={'result': sentinel.result, '@timestamp': expected_dt.isoformat()[:-3] + 'Z'}, params={'op_type': 'create'}, ) @@ -354,7 +434,10 @@ def test_backend_concurrent_update(self, base_datetime_mock, es_datetime_mock): sleep_mock = Mock() x._sleep = sleep_mock x._server = Mock() - x._server.index.side_effect = exceptions.ConflictError(409, "concurrent update", {}) + x._server.index.side_effect = exceptions.ConflictError( + "concurrent update", + 
ApiResponseMeta(409, "HTTP/1.1", HttpHeaders(), 0, NodeConfig("https", "localhost", 9200)), + None) x._server.get.side_effect = x_server_get_side_effect x._server.update.side_effect = [ {'result': 'noop'}, @@ -370,7 +453,6 @@ def test_backend_concurrent_update(self, base_datetime_mock, es_datetime_mock): call( id=encoded_task_id, index=x.index, - doc_type=x.doc_type, body={ 'result': expected_result, '@timestamp': expected_dt.isoformat()[:-3] + 'Z' @@ -380,7 +462,6 @@ def test_backend_concurrent_update(self, base_datetime_mock, es_datetime_mock): call( id=encoded_task_id, index=x.index, - doc_type=x.doc_type, body={ 'result': expected_result, '@timestamp': expected_dt.isoformat()[:-3] + 'Z' @@ -392,7 +473,6 @@ def test_backend_concurrent_update(self, base_datetime_mock, es_datetime_mock): call( id=encoded_task_id, index=x.index, - doc_type=x.doc_type, body={ 'doc': { 'result': expected_result, @@ -404,7 +484,6 @@ def test_backend_concurrent_update(self, base_datetime_mock, es_datetime_mock): call( id=encoded_task_id, index=x.index, - doc_type=x.doc_type, body={ 'doc': { 'result': expected_result, @@ -440,7 +519,9 @@ def test_backend_index_conflicting_document_removed(self, base_datetime_mock, es x._sleep = sleep_mock x._server = Mock() x._server.index.side_effect = [ - exceptions.ConflictError(409, "concurrent update", {}), + exceptions.ConflictError("concurrent update", + ApiResponseMeta(409, "HTTP/1.1", HttpHeaders(), 0, + NodeConfig("https", "localhost", 9200)), None), {'result': 'created'} ] @@ -451,10 +532,10 @@ def test_backend_index_conflicting_document_removed(self, base_datetime_mock, es '_seq_no': 2, '_primary_term': 1, }, - exceptions.NotFoundError(404, - '{"_index":"celery","_type":"_doc","_id":"toto","found":false}', - {'_index': 'celery', '_type': '_doc', - '_id': 'toto', 'found': False}), + exceptions.NotFoundError('{"_index":"celery","_type":"_doc","_id":"toto","found":false}', + ApiResponseMeta(404, "HTTP/1.1", HttpHeaders(), 0, + NodeConfig("https", "localhost", 9200)), + {'_index': 'celery', '_type': '_doc', '_id': 'toto', 'found': False}), ] result_meta = x._get_result_meta(result, states.SUCCESS, None, None) @@ -467,7 +548,6 @@ def test_backend_index_conflicting_document_removed(self, base_datetime_mock, es call( id=encoded_task_id, index=x.index, - doc_type=x.doc_type, body={ 'result': expected_result, '@timestamp': expected_dt.isoformat()[:-3] + 'Z' @@ -477,7 +557,6 @@ def test_backend_index_conflicting_document_removed(self, base_datetime_mock, es call( id=encoded_task_id, index=x.index, - doc_type=x.doc_type, body={ 'result': expected_result, '@timestamp': expected_dt.isoformat()[:-3] + 'Z' @@ -511,7 +590,9 @@ def test_backend_index_conflicting_document_removed_not_throwing(self, base_date x._sleep = sleep_mock x._server = Mock() x._server.index.side_effect = [ - exceptions.ConflictError(409, "concurrent update", {}), + exceptions.ConflictError("concurrent update", + ApiResponseMeta(409, "HTTP/1.1", HttpHeaders(), 0, + NodeConfig("https", "localhost", 9200)), None), {'result': 'created'} ] @@ -535,7 +616,6 @@ def test_backend_index_conflicting_document_removed_not_throwing(self, base_date call( id=encoded_task_id, index=x.index, - doc_type=x.doc_type, body={ 'result': expected_result, '@timestamp': expected_dt.isoformat()[:-3] + 'Z' @@ -545,7 +625,6 @@ def test_backend_index_conflicting_document_removed_not_throwing(self, base_date call( id=encoded_task_id, index=x.index, - doc_type=x.doc_type, body={ 'result': expected_result, '@timestamp': 
expected_dt.isoformat()[:-3] + 'Z' @@ -579,7 +658,9 @@ def test_backend_index_corrupted_conflicting_document(self, base_datetime_mock, x._sleep = sleep_mock x._server = Mock() x._server.index.side_effect = [ - exceptions.ConflictError(409, "concurrent update", {}) + exceptions.ConflictError("concurrent update", + ApiResponseMeta(409, "HTTP/1.1", HttpHeaders(), 0, + NodeConfig("https", "localhost", 9200)), None) ] x._server.update.side_effect = [ @@ -602,7 +683,6 @@ def test_backend_index_corrupted_conflicting_document(self, base_datetime_mock, x._server.index.assert_called_once_with( id=encoded_task_id, index=x.index, - doc_type=x.doc_type, body={ 'result': expected_result, '@timestamp': expected_dt.isoformat()[:-3] + 'Z' @@ -612,7 +692,6 @@ def test_backend_index_corrupted_conflicting_document(self, base_datetime_mock, x._server.update.assert_called_once_with( id=encoded_task_id, index=x.index, - doc_type=x.doc_type, body={ 'doc': { 'result': expected_result, @@ -629,7 +708,7 @@ def test_backend_params_by_url(self): x = app.backend assert x.index == 'index' - assert x.doc_type == 'doc_type' + assert x.doc_type == "doc_type" assert x.scheme == 'http' assert x.host == 'localhost' assert x.port == 9200 @@ -640,7 +719,7 @@ def test_backend_url_no_params(self): x = app.backend assert x.index == 'celery' - assert x.doc_type == 'backend' + assert x.doc_type is None assert x.scheme == 'http' assert x.host == 'localhost' assert x.port == 9200 @@ -657,11 +736,10 @@ def test_get_server_with_auth(self, mock_es_client): x._get_server() mock_es_client.assert_called_once_with( - 'localhost:9200', + 'https://localhost:9200', http_auth=('fake_user', 'fake_pass'), max_retries=x.es_max_retries, retry_on_timeout=x.es_retry_on_timeout, - scheme='https', timeout=x.es_timeout, ) @@ -672,17 +750,15 @@ def test_get_server_without_auth(self, mock_es_client): x = app.backend x._get_server() mock_es_client.assert_called_once_with( - 'localhost:9200', + 'http://localhost:9200', http_auth=None, max_retries=x.es_max_retries, retry_on_timeout=x.es_retry_on_timeout, - scheme='http', timeout=x.es_timeout, ) def test_index(self): x = ElasticsearchBackend(app=self.app) - x.doc_type = 'test-doc-type' x._server = Mock() x._server.index = Mock() expected_result = { @@ -699,16 +775,39 @@ def test_index(self): ) x._server.index.assert_called_once_with( id=str(sentinel.task_id), - doc_type=x.doc_type, index=x.index, body=body, params={'op_type': 'create'}, kwarg1='test1' ) + def test_index_with_doctype(self): + x = ElasticsearchBackend(app=self.app) + x._server = Mock() + x._server.index = Mock() + expected_result = { + '_id': sentinel.task_id, + '_source': {'result': sentinel.result} + } + x._server.index.return_value = expected_result + x.doc_type = "_doc" + body = {"field1": "value1"} + x._index( + id=str(sentinel.task_id).encode(), + body=body, + kwarg1='test1' + ) + x._server.index.assert_called_once_with( + id=str(sentinel.task_id), + index=x.index, + doc_type=x.doc_type, + body=body, + params={'op_type': 'create'}, + kwarg1='test1' + ) + def test_index_bytes_key(self): x = ElasticsearchBackend(app=self.app) - x.doc_type = 'test-doc-type' x._server = Mock() x._server.index = Mock() expected_result = { @@ -725,7 +824,6 @@ def test_index_bytes_key(self): ) x._server.index.assert_called_once_with( id=str(sentinel.task_id), - doc_type=x.doc_type, index=x.index, body={"field1": "value1"}, params={'op_type': 'create'}, @@ -854,15 +952,21 @@ def test_mget(self): ] assert x.mget([sentinel.task_id1, sentinel.task_id2]) == 
[sentinel.result1, sentinel.result2] x._server.get.assert_has_calls([ - call(index=x.index, doc_type=x.doc_type, id=sentinel.task_id1), - call(index=x.index, doc_type=x.doc_type, id=sentinel.task_id2), + call(index=x.index, id=sentinel.task_id1), + call(index=x.index, id=sentinel.task_id2), ]) def test_exception_safe_to_retry(self): x = ElasticsearchBackend(app=self.app) assert not x.exception_safe_to_retry(Exception("failed")) assert not x.exception_safe_to_retry(BaseException("failed")) - assert x.exception_safe_to_retry(exceptions.ConflictError(409, "concurrent update", {})) - assert x.exception_safe_to_retry(exceptions.ConnectionError(503, "service unavailable", {})) - assert x.exception_safe_to_retry(exceptions.TransportError(429, "too many requests", {})) - assert not x.exception_safe_to_retry(exceptions.NotFoundError(404, "not found", {})) + assert x.exception_safe_to_retry( + exceptions.ConflictError("concurrent update", + ApiResponseMeta(409, "HTTP/1.1", HttpHeaders(), 0, + NodeConfig("https", "localhost", 9200)), None)) + assert x.exception_safe_to_retry(exceptions.ConnectionError("service unavailable")) + assert x.exception_safe_to_retry(exceptions.TransportError("too many requests")) + assert not x.exception_safe_to_retry( + exceptions.NotFoundError("not found", + ApiResponseMeta(404, "HTTP/1.1", HttpHeaders(), 0, + NodeConfig("https", "localhost", 9200)), None)) From 06390779a0d04b494c65462208e24f8cd4475571 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Oliv=C3=A9r=20Kecskem=C3=A9ty?= Date: Sun, 8 Oct 2023 13:51:18 +0200 Subject: [PATCH 0517/1051] Fix eager tasks does not populate name field (#8486) * Add task name to eager request * Add task name to eager result * Add tests * Add an extra check to make sure name is populated in EagerResults --- celery/app/task.py | 3 ++- celery/result.py | 4 +++- t/unit/tasks/test_result.py | 7 +++++++ t/unit/tasks/test_tasks.py | 16 ++++++++++++++++ 4 files changed, 28 insertions(+), 2 deletions(-) diff --git a/celery/app/task.py b/celery/app/task.py index cceb2a09ccd..a23254d3a26 100644 --- a/celery/app/task.py +++ b/celery/app/task.py @@ -788,6 +788,7 @@ def apply(self, args=None, kwargs=None, request = { 'id': task_id, + 'task': self.name, 'retries': retries, 'is_eager': True, 'logfile': logfile, @@ -824,7 +825,7 @@ def apply(self, args=None, kwargs=None, if isinstance(retval, Retry) and retval.sig is not None: return retval.sig.apply(retries=retries + 1) state = states.SUCCESS if ret.info is None else ret.info.state - return EagerResult(task_id, retval, state, traceback=tb) + return EagerResult(task_id, retval, state, traceback=tb, name=self.name) def AsyncResult(self, task_id, **kwargs): """Get AsyncResult instance for the specified task. 
diff --git a/celery/result.py b/celery/result.py index 065d9ca5158..75512c5aadb 100644 --- a/celery/result.py +++ b/celery/result.py @@ -984,13 +984,14 @@ def restore(cls, id, backend=None, app=None): class EagerResult(AsyncResult): """Result that we know has already been executed.""" - def __init__(self, id, ret_value, state, traceback=None): + def __init__(self, id, ret_value, state, traceback=None, name=None): # pylint: disable=super-init-not-called # XXX should really not be inheriting from AsyncResult self.id = id self._result = ret_value self._state = state self._traceback = traceback + self._name = name self.on_ready = promise() self.on_ready(self) @@ -1043,6 +1044,7 @@ def _cache(self): 'result': self._result, 'status': self._state, 'traceback': self._traceback, + 'name': self._name, } @property diff --git a/t/unit/tasks/test_result.py b/t/unit/tasks/test_result.py index 42eaab8987d..30e0b9ef134 100644 --- a/t/unit/tasks/test_result.py +++ b/t/unit/tasks/test_result.py @@ -967,6 +967,13 @@ def test_get_sync_subtask_option(self, task_join_will_block): res_subtask_async.get() res_subtask_async.get(disable_sync_subtasks=False) + def test_populate_name(self): + res = EagerResult('x', 'x', states.SUCCESS, None, 'test_task') + assert res.name == 'test_task' + + res = EagerResult('x', 'x', states.SUCCESS, name='test_task_named_argument') + assert res.name == 'test_task_named_argument' + class test_tuples: diff --git a/t/unit/tasks/test_tasks.py b/t/unit/tasks/test_tasks.py index 5cff1c3db07..10a373ef54b 100644 --- a/t/unit/tasks/test_tasks.py +++ b/t/unit/tasks/test_tasks.py @@ -1441,6 +1441,7 @@ def test_apply(self): assert e.successful() assert e.ready() + assert e.name == 't.unit.tasks.test_tasks.increment_counter' assert repr(e).startswith(' Date: Mon, 9 Oct 2023 16:49:41 +0000 Subject: [PATCH 0518/1051] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/asottile/pyupgrade: v3.13.0 → v3.15.0](https://github.com/asottile/pyupgrade/compare/v3.13.0...v3.15.0) - [github.com/pre-commit/pre-commit-hooks: v4.4.0 → v4.5.0](https://github.com/pre-commit/pre-commit-hooks/compare/v4.4.0...v4.5.0) --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 1a258458959..61b60e2ac0b 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/asottile/pyupgrade - rev: v3.13.0 + rev: v3.15.0 hooks: - id: pyupgrade args: ["--py38-plus"] @@ -17,7 +17,7 @@ repos: exclude: ^celery/app/task\.py$|^celery/backends/cache\.py$ - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.4.0 + rev: v4.5.0 hooks: - id: check-merge-conflict - id: check-toml From 3826228c9c2d67bdceb2497dc94e3af98452f01c Mon Sep 17 00:00:00 2001 From: Amrit Rathie Date: Tue, 10 Oct 2023 12:18:57 -0500 Subject: [PATCH 0519/1051] Fix typo in celery.app.control (#8563) --- celery/app/control.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/celery/app/control.py b/celery/app/control.py index 52763e8a5f5..73b5162e851 100644 --- a/celery/app/control.py +++ b/celery/app/control.py @@ -360,7 +360,7 @@ def query_task(self, *ids): * ``routing_key`` - Routing key used when task was published * ``priority`` - Priority used when task was published * ``redelivered`` - True if the task was redelivered - * ``worker_pid`` - PID of worker processin the task + * ``worker_pid`` - 
PID of worker processing the task """ # signature used be unary: query_task(ids=[id1, id2]) From 65db1447390dbdfb002c4d760d58c56ee07dfb7e Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Thu, 12 Oct 2023 21:23:26 +0600 Subject: [PATCH 0520/1051] Update solar.txt ephem (#8566) --- requirements/extras/solar.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/solar.txt b/requirements/extras/solar.txt index 2b7a44d1864..318354cc7ed 100644 --- a/requirements/extras/solar.txt +++ b/requirements/extras/solar.txt @@ -1 +1 @@ -ephem==4.1.4; platform_python_implementation!="PyPy" +ephem==4.1.5; platform_python_implementation!="PyPy" From 4d18666951d9271f68cbe2927c396990db7febf9 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Thu, 12 Oct 2023 21:23:59 +0600 Subject: [PATCH 0521/1051] Update test.txt pytest-timeout (#8565) --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index 0900248ada6..6f977121877 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,7 +1,7 @@ pytest==7.4.2 pytest-celery==0.0.0 pytest-subtests==0.11.0 -pytest-timeout==2.1.0 +pytest-timeout==2.2.0 pytest-click==1.1.0 pytest-order==1.1.0 boto3>=1.26.143 From fcecf18ae3bc38e866c91bc76e48c3d788482d86 Mon Sep 17 00:00:00 2001 From: Robert Collins Date: Tue, 17 Oct 2023 06:45:29 +1300 Subject: [PATCH 0522/1051] Correct some mypy errors (#8570) * Fix mypy for worker.py * Fix mypy for collections.py * Permit mypy to import saferepr * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- celery/contrib/testing/worker.py | 26 ++++++++++++-------------- celery/utils/collections.py | 3 +-- celery/utils/saferepr.py | 9 ++++++--- 3 files changed, 19 insertions(+), 19 deletions(-) diff --git a/celery/contrib/testing/worker.py b/celery/contrib/testing/worker.py index fa8f6889682..b8d3fc06d87 100644 --- a/celery/contrib/testing/worker.py +++ b/celery/contrib/testing/worker.py @@ -3,10 +3,10 @@ import os import threading from contextlib import contextmanager -from typing import Any, Iterable, Union # noqa +from typing import Any, Iterable, Optional, Union import celery.worker.consumer # noqa -from celery import Celery, worker # noqa +from celery import Celery, worker from celery.result import _set_task_join_will_block, allow_join_result from celery.utils.dispatch import Signal from celery.utils.nodenames import anon_nodename @@ -131,16 +131,15 @@ def start_worker( @contextmanager -def _start_worker_thread(app, - concurrency=1, - pool='solo', - loglevel=WORKER_LOGLEVEL, - logfile=None, - WorkController=TestWorkController, - perform_ping_check=True, - shutdown_timeout=10.0, - **kwargs): - # type: (Celery, int, str, Union[str, int], str, Any, **Any) -> Iterable +def _start_worker_thread(app: Celery, + concurrency: int = 1, + pool: str = 'solo', + loglevel: Union[str, int] = WORKER_LOGLEVEL, + logfile: Optional[str] = None, + WorkController: Any = TestWorkController, + perform_ping_check: bool = True, + shutdown_timeout: float = 10.0, + **kwargs) -> Iterable[worker.WorkController]: """Start Celery worker in a thread. 
Yields: @@ -211,8 +210,7 @@ def _start_worker_process(app, cluster.stopwait() -def setup_app_for_worker(app, loglevel, logfile) -> None: - # type: (Celery, Union[str, int], str) -> None +def setup_app_for_worker(app: Celery, loglevel: Union[str, int], logfile: str) -> None: """Setup the app to be used for starting an embedded worker.""" app.finalize() app.set_current() diff --git a/celery/utils/collections.py b/celery/utils/collections.py index 6fb559acecf..396ed817cdd 100644 --- a/celery/utils/collections.py +++ b/celery/utils/collections.py @@ -595,8 +595,7 @@ def purge(self, now=None): break # oldest item hasn't expired yet self.pop() - def pop(self, default=None) -> Any: - # type: (Any) -> Any + def pop(self, default: Any = None) -> Any: """Remove and return the oldest item, or :const:`None` when empty.""" while self._heap: _, item = heappop(self._heap) diff --git a/celery/utils/saferepr.py b/celery/utils/saferepr.py index feddd41f0fd..68768882fc7 100644 --- a/celery/utils/saferepr.py +++ b/celery/utils/saferepr.py @@ -15,7 +15,7 @@ from itertools import chain from numbers import Number from pprint import _recursion -from typing import Any, AnyStr, Callable, Dict, Iterator, List, Sequence, Set, Tuple # noqa +from typing import Any, AnyStr, Callable, Dict, Iterator, List, Optional, Sequence, Set, Tuple # noqa from .text import truncate @@ -194,9 +194,12 @@ def _reprseq(val, lit_start, lit_end, builtin_type, chainer): ) -def reprstream(stack, seen=None, maxlevels=3, level=0, isinstance=isinstance): +def reprstream(stack: deque, + seen: Optional[Set] = None, + maxlevels: int = 3, + level: int = 0, + isinstance: Callable = isinstance) -> Iterator[Any]: """Streaming repr, yielding tokens.""" - # type: (deque, Set, int, int, Callable) -> Iterator[Any] seen = seen or set() append = stack.append popleft = stack.popleft From 3bf2c38c378a3b9cb0c98a62b83b347d09d490b8 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 16 Oct 2023 23:38:43 +0300 Subject: [PATCH 0523/1051] [pre-commit.ci] pre-commit autoupdate (#8572) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/mirrors-mypy: v1.5.1 → v1.6.0](https://github.com/pre-commit/mirrors-mypy/compare/v1.5.1...v1.6.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 61b60e2ac0b..2650da2f33a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -30,7 +30,7 @@ repos: - id: isort - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.5.1 + rev: v1.6.0 hooks: - id: mypy pass_filenames: false From 4d264575e601d76fcedf606ddad538dcef36c631 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Wed, 18 Oct 2023 18:13:13 +0600 Subject: [PATCH 0524/1051] Update elasticsearch.txt (#8573) --- requirements/extras/elasticsearch.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/elasticsearch.txt b/requirements/extras/elasticsearch.txt index 3ae47451b5f..a6dbb7feac1 100644 --- a/requirements/extras/elasticsearch.txt +++ b/requirements/extras/elasticsearch.txt @@ -1,2 +1,2 @@ -elasticsearch<=8.10.0 +elasticsearch<=8.10.1 elastic-transport==8.4.1 From 0bc89cc594638e1d88655764807fcc59fb32efc6 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Wed, 18 Oct 2023 19:29:23 +0600 Subject: 
[PATCH 0525/1051] Update test.txt deps (#8574) --- requirements/test.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/test.txt b/requirements/test.txt index 6f977121877..97a7df0e233 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -7,8 +7,8 @@ pytest-order==1.1.0 boto3>=1.26.143 moto>=4.1.11 # typing extensions -mypy==1.5.1; platform_python_implementation=="CPython" -pre-commit==3.4.0 +mypy==1.6.0; platform_python_implementation=="CPython" +pre-commit==3.5.0 -r extras/yaml.txt -r extras/msgpack.txt -r extras/mongodb.txt From fe762c3a26e56ff34608244fc04336b438f8fa0c Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 23 Oct 2023 21:41:12 +0300 Subject: [PATCH 0526/1051] [pre-commit.ci] pre-commit autoupdate (#8587) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/mirrors-mypy: v1.6.0 → v1.6.1](https://github.com/pre-commit/mirrors-mypy/compare/v1.6.0...v1.6.1) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 2650da2f33a..07450537784 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -30,7 +30,7 @@ repos: - id: isort - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.6.0 + rev: v1.6.1 hooks: - id: mypy pass_filenames: false From b838b058710a3d8d09745859b9c8e7e3b60703c3 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Tue, 24 Oct 2023 12:06:13 +0600 Subject: [PATCH 0527/1051] Update test.txt --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index 97a7df0e233..736afb96d88 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -7,7 +7,7 @@ pytest-order==1.1.0 boto3>=1.26.143 moto>=4.1.11 # typing extensions -mypy==1.6.0; platform_python_implementation=="CPython" +mypy==1.6.1; platform_python_implementation=="CPython" pre-commit==3.5.0 -r extras/yaml.txt -r extras/msgpack.txt From 848b8ad97d13ed97cd8a520cd64b459a79c37d96 Mon Sep 17 00:00:00 2001 From: Nikita Frolenkov Date: Thu, 2 Nov 2023 15:21:18 +0500 Subject: [PATCH 0528/1051] Improved the "Next steps" documentation (#8561). --- docs/getting-started/next-steps.rst | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/docs/getting-started/next-steps.rst b/docs/getting-started/next-steps.rst index 286ff41261a..8f8a82b3920 100644 --- a/docs/getting-started/next-steps.rst +++ b/docs/getting-started/next-steps.rst @@ -26,9 +26,10 @@ Our Project Project layout:: - proj/__init__.py - /celery.py - /tasks.py + src/ + proj/__init__.py + /celery.py + /tasks.py :file:`proj/celery.py` ~~~~~~~~~~~~~~~~~~~~~~ @@ -70,7 +71,8 @@ you simply import this instance. Starting the worker ------------------- -The :program:`celery` program can be used to start the worker (you need to run the worker in the directory above proj): +The :program:`celery` program can be used to start the worker (you need to run the worker in the directory above +`proj`, according to the example project layout the directory is `src`): .. 
code-block:: console From 019fc2400d3dab88998f17d079f2a92814cd9586 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sun, 5 Nov 2023 14:39:14 +0200 Subject: [PATCH 0529/1051] Disabled couchbase tests due to broken package breaking main. --- requirements/test-ci-default.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test-ci-default.txt b/requirements/test-ci-default.txt index 5493cae1c99..0ab2b79da06 100644 --- a/requirements/test-ci-default.txt +++ b/requirements/test-ci-default.txt @@ -12,7 +12,7 @@ -r extras/thread.txt -r extras/elasticsearch.txt -r extras/couchdb.txt --r extras/couchbase.txt +# -r extras/couchbase.txt -r extras/arangodb.txt -r extras/consul.txt -r extras/cosmosdbsql.txt From c264c8eab13189c1c6a27e859d84a23433b5c361 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Mon, 6 Nov 2023 17:06:32 +0600 Subject: [PATCH 0530/1051] Update elasticsearch deps (#8605) --- requirements/extras/elasticsearch.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/elasticsearch.txt b/requirements/extras/elasticsearch.txt index a6dbb7feac1..c2238c8cd8e 100644 --- a/requirements/extras/elasticsearch.txt +++ b/requirements/extras/elasticsearch.txt @@ -1,2 +1,2 @@ elasticsearch<=8.10.1 -elastic-transport==8.4.1 +elastic-transport<=8.10.0 From 6fea26dc849c080ea1ae35679f597f2e9047cc98 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Mon, 6 Nov 2023 17:07:20 +0600 Subject: [PATCH 0531/1051] Update cryptography==41.0.5 (#8604) --- requirements/extras/auth.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/auth.txt b/requirements/extras/auth.txt index 7e668341b53..485821aff14 100644 --- a/requirements/extras/auth.txt +++ b/requirements/extras/auth.txt @@ -1 +1 @@ -cryptography==41.0.4 +cryptography==41.0.5 From 2f5acffb1c87ef774a7a812c1c96a1af2216545c Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Mon, 6 Nov 2023 17:18:22 +0600 Subject: [PATCH 0532/1051] Update pytest==7.4.3 (#8606) --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index 736afb96d88..30203095421 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,4 +1,4 @@ -pytest==7.4.2 +pytest==7.4.3 pytest-celery==0.0.0 pytest-subtests==0.11.0 pytest-timeout==2.2.0 From cb4beac2f78bce712bd63963f0ca22113255ec72 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Mon, 6 Nov 2023 18:40:20 +0600 Subject: [PATCH 0533/1051] test initial support of python 3.12.x (#8549) * test initial support of python 3.12.0-rc3 * Update .github/workflows/python-package.yml * Update .github/workflows/python-package.yml * Update .github/workflows/python-package.yml Co-authored-by: Adrian --------- Co-authored-by: Tomer Nosrati Co-authored-by: Adrian --- .github/workflows/python-package.yml | 2 +- tox.ini | 8 +++++--- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 078c5a4fbb5..8eb2f466e2d 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -29,7 +29,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ['3.8', '3.9', '3.10', '3.11', 'pypy-3.9', 'pypy-3.10'] + python-version: ['3.8', '3.9', '3.10', '3.11', '3.12', 'pypy-3.9', 'pypy-3.10'] os: ["ubuntu-latest", "windows-latest"] exclude: - python-version: 'pypy-3.9' diff --git a/tox.ini b/tox.ini index 0b82e2d3ec0..5d2030062e5 100644 --- a/tox.ini 
+++ b/tox.ini @@ -2,8 +2,8 @@ requires = tox-gh-actions envlist = - {3.8,3.9,3.10,3.11,pypy3}-unit - {3.8,3.9,3.10,3.11,pypy3}-integration-{rabbitmq_redis,rabbitmq,redis,dynamodb,azureblockblob,cache,cassandra,elasticsearch,docker} + {3.8,3.9,3.10,3.11,3.12,pypy3}-unit + {3.8,3.9,3.10,3.11,3.12,pypy3}-integration-{rabbitmq_redis,rabbitmq,redis,dynamodb,azureblockblob,cache,cassandra,elasticsearch,docker} flake8 apicheck @@ -17,6 +17,7 @@ python = 3.9: 3.9-unit 3.10: 3.10-unit 3.11: 3.11-unit + 3.12: 3.12-unit pypy-3: pypy3-unit [testenv] @@ -29,7 +30,7 @@ deps= -r{toxinidir}/requirements/test.txt -r{toxinidir}/requirements/pkgutils.txt - 3.8,3.9,3.10,3.11: -r{toxinidir}/requirements/test-ci-default.txt + 3.8,3.9,3.10,3.11,3.12: -r{toxinidir}/requirements/test-ci-default.txt 3.8,3.9,3.10,3.11: -r{toxinidir}/requirements/docs.txt pypy3: -r{toxinidir}/requirements/test-ci-default.txt @@ -82,6 +83,7 @@ basepython = 3.9: python3.9 3.10: python3.10 3.11: python3.11 + 3.12: python3.12 pypy3: pypy3 mypy: python3.8 lint,apicheck,linkcheck,configcheck,bandit: python3.11 From b449c8f2c6b4efcb92e67c4cba8235b7b7dc3f00 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Mon, 6 Nov 2023 21:49:33 +0600 Subject: [PATCH 0534/1051] updated new versions to fix CI (#8607) --- requirements/default.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/default.txt b/requirements/default.txt index 7f24bdc0c06..a26d814df68 100644 --- a/requirements/default.txt +++ b/requirements/default.txt @@ -1,6 +1,6 @@ -billiard>=4.1.0,<5.0 -kombu>=5.3.2,<6.0 -vine>=5.0.0,<6.0 +billiard>=4.2.0,<5.0 +kombu>=5.3.3,<6.0 +vine>=5.1.0,<6.0 click>=8.1.2,<9.0 click-didyoumean>=0.3.0 click-repl>=0.2.0 From d7f80b58317b9a4279cdf8f7847de8d061edcdbe Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Mon, 6 Nov 2023 22:33:26 +0600 Subject: [PATCH 0535/1051] Update zstd.txt (#8609) --- requirements/extras/zstd.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/zstd.txt b/requirements/extras/zstd.txt index d7c173723ed..70ad0df0e95 100644 --- a/requirements/extras/zstd.txt +++ b/requirements/extras/zstd.txt @@ -1 +1 @@ -zstandard==0.21.0 +zstandard==0.22.0 From a3fc830cfd4c88723e47da4c0686cca5d64fae3c Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 7 Nov 2023 07:53:20 +0200 Subject: [PATCH 0536/1051] Fixed CI Support with Python 3.12 (#8611) * Fixed t/unit/tasks/test_result.py::test_del() * Skip t/unit/tasks/test_result.py::test_del() if running with PyPy * Added Python 3.12 to integration tests in the CI --- .github/workflows/python-package.yml | 2 +- t/unit/tasks/test_result.py | 8 +++++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 8eb2f466e2d..e71c47cef18 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -75,7 +75,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ['3.8', '3.9', '3.10', '3.11'] + python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] toxenv: ['redis', 'rabbitmq', 'rabbitmq_redis'] services: diff --git a/t/unit/tasks/test_result.py b/t/unit/tasks/test_result.py index 30e0b9ef134..1f7f7e08ccf 100644 --- a/t/unit/tasks/test_result.py +++ b/t/unit/tasks/test_result.py @@ -1,5 +1,6 @@ import copy import datetime +import platform import traceback from contextlib import contextmanager from unittest.mock import Mock, call, patch @@ -389,12 +390,17 @@ def test_ready(self): assert not 
self.app.AsyncResult(uuid()).ready() + @pytest.mark.skipif( + platform.python_implementation() == "PyPy", + reason="Mocking here doesn't play well with PyPy", + ) def test_del(self): with patch('celery.result.AsyncResult.backend') as backend: result = self.app.AsyncResult(self.task1['id']) + result.backend = backend result_clone = copy.copy(result) del result - assert backend.remove_pending_result.called_once_with( + backend.remove_pending_result.assert_called_once_with( result_clone ) From de0607ac42607963899e4a7568db81ce88ab7023 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Tue, 7 Nov 2023 12:55:42 +0600 Subject: [PATCH 0537/1051] updated CI, docs and classifier for next release (#8613) --- .github/workflows/python-package.yml | 8 ++++++-- README.rst | 8 ++++---- setup.py | 1 + 3 files changed, 11 insertions(+), 6 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index e71c47cef18..5a140428f95 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -29,13 +29,17 @@ jobs: strategy: fail-fast: false matrix: - python-version: ['3.8', '3.9', '3.10', '3.11', '3.12', 'pypy-3.9', 'pypy-3.10'] + python-version: ['3.8', '3.9', '3.10', '3.11', '3.12', 'pypy-3.10'] os: ["ubuntu-latest", "windows-latest"] exclude: - - python-version: 'pypy-3.9' + - python-version: '3.9' os: "windows-latest" - python-version: 'pypy-3.10' os: "windows-latest" + - python-version: '3.10' + os: "windows-latest" + - python-version: '3.11' + os: "windows-latest" steps: - name: Install apt packages diff --git a/README.rst b/README.rst index cabfbba1d96..39be594a9bd 100644 --- a/README.rst +++ b/README.rst @@ -58,10 +58,10 @@ in such a way that the client enqueues an URL to be requested by a worker. What do I need? =============== -Celery version 5.3.4 runs on: +Celery version 5.3.5 runs on: -- Python (3.8, 3.9, 3.10, 3.11) -- PyPy3.8+ (v7.3.11+) +- Python (3.8, 3.9, 3.10, 3.11, 3.12) +- PyPy3.9+ (v7.3.12+) This is the version of celery which will support Python 3.8 or newer. 
@@ -92,7 +92,7 @@ Get Started =========== If this is the first time you're trying to use Celery, or you're -new to Celery v5.3.4 coming from previous versions then you should read our +new to Celery v5.3.5 coming from previous versions then you should read our getting started tutorials: - `First steps with Celery`_ diff --git a/setup.py b/setup.py index f8587da92f6..6ffcdeb1c3a 100755 --- a/setup.py +++ b/setup.py @@ -174,6 +174,7 @@ def long_description(): "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Operating System :: OS Independent" From 2a4c42799cd6b9228751a63b56a2007ad6d94ade Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Tue, 7 Nov 2023 17:39:27 +0600 Subject: [PATCH 0538/1051] updated dockerfile to add python 3.12 (#8614) * updated dockerfile to add python 3.12 * try to trigger docker CI by pointing to right directory * re organise versions and links * added ca-certificates to fix cert error * re order --- .github/workflows/docker.yml | 4 +-- docker/Dockerfile | 52 +++++++++++++++++++++------------ docker/scripts/install-pyenv.sh | 11 +++---- 3 files changed, 41 insertions(+), 26 deletions(-) diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 6f7319c2bca..054fe215089 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -7,7 +7,7 @@ on: - '**.py' - '**.txt' - '**.toml' - - './docker/**' + - '/docker/**' - '.github/workflows/docker.yml' pull_request: branches: [ 'main'] @@ -15,7 +15,7 @@ on: - '**.py' - '**.txt' - '**.toml' - - './docker/**' + - '/docker/**' - '.github/workflows/docker.yml' diff --git a/docker/Dockerfile b/docker/Dockerfile index ddda214a38c..cf8caa131d7 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -11,6 +11,7 @@ RUN apt-get update && apt-get install -y build-essential \ libffi-dev \ tk-dev \ xz-utils \ + ca-certificates \ curl \ lsb-release \ git \ @@ -35,6 +36,9 @@ RUN apt-get update && apt-get install -y build-essential \ # Setup variables. Even though changing these may cause unnecessary invalidation of # unrelated elements, grouping them together makes the Dockerfile read better. 
ENV PROVISIONING /provisioning +ENV PIP_NO_CACHE_DIR=off +ENV PYTHONDONTWRITEBYTECODE=1 + ARG CELERY_USER=developer @@ -59,13 +63,16 @@ USER $CELERY_USER RUN curl https://pyenv.run | bash # Install required Python versions -RUN pyenv install 3.8 -RUN pyenv install 3.9 -RUN pyenv install 3.10 +RUN pyenv install 3.12 RUN pyenv install 3.11 +RUN pyenv install 3.10 +RUN pyenv install 3.9 +RUN pyenv install 3.8 + + # Set global Python versions -RUN pyenv global 3.8 3.9 3.10 3.11 +RUN pyenv global 3.12 3.11 3.10 3.9 3.8 # Install celery WORKDIR $HOME @@ -74,23 +81,33 @@ COPY --chown=1000:1000 docker/entrypoint /entrypoint RUN chmod gu+x /entrypoint # Define the local pyenvs -RUN pyenv local 3.11 3.10 3.9 3.8 +RUN pyenv local 3.12 3.11 3.10 3.9 3.8 -RUN pyenv exec python3.8 -m pip install --upgrade pip setuptools wheel && \ - pyenv exec python3.9 -m pip install --upgrade pip setuptools wheel && \ +RUN pyenv exec python3.12 -m pip install --upgrade pip setuptools wheel && \ + pyenv exec python3.11 -m pip install --upgrade pip setuptools wheel && \ pyenv exec python3.10 -m pip install --upgrade pip setuptools wheel && \ - pyenv exec python3.11 -m pip install --upgrade pip setuptools wheel + pyenv exec python3.9 -m pip install --upgrade pip setuptools wheel && \ + pyenv exec python3.8 -m pip install --upgrade pip setuptools wheel COPY --chown=1000:1000 . $HOME/celery -RUN pyenv exec python3.8 -m pip install -e $HOME/celery && \ - pyenv exec python3.9 -m pip install -e $HOME/celery && \ +RUN pyenv exec python3.12 -m pip install -e $HOME/celery && \ + pyenv exec python3.11 -m pip install -e $HOME/celery && \ pyenv exec python3.10 -m pip install -e $HOME/celery && \ - pyenv exec python3.11 -m pip install -e $HOME/celery + pyenv exec python3.9 -m pip install -e $HOME/celery && \ + pyenv exec python3.8 -m pip install -e $HOME/celery # Setup one celery environment for basic development use -RUN pyenv exec python3.8 -m pip install \ - -r requirements/default.txt \ +RUN pyenv exec python3.12 -m pip install -r requirements/default.txt \ + -r requirements/dev.txt \ + -r requirements/docs.txt \ + -r requirements/pkgutils.txt \ + -r requirements/test-ci-base.txt \ + -r requirements/test-ci-default.txt \ + -r requirements/test-integration.txt \ + -r requirements/test-pypy3.txt \ + -r requirements/test.txt && \ + pyenv exec python3.11 -m pip install -r requirements/default.txt \ -r requirements/dev.txt \ -r requirements/docs.txt \ -r requirements/pkgutils.txt \ @@ -99,8 +116,7 @@ RUN pyenv exec python3.8 -m pip install \ -r requirements/test-integration.txt \ -r requirements/test-pypy3.txt \ -r requirements/test.txt && \ - pyenv exec python3.9 -m pip install \ - -r requirements/default.txt \ + pyenv exec python3.10 -m pip install -r requirements/default.txt \ -r requirements/dev.txt \ -r requirements/docs.txt \ -r requirements/pkgutils.txt \ @@ -109,8 +125,7 @@ RUN pyenv exec python3.8 -m pip install \ -r requirements/test-integration.txt \ -r requirements/test-pypy3.txt \ -r requirements/test.txt && \ - pyenv exec python3.10 -m pip install \ - -r requirements/default.txt \ + pyenv exec python3.9 -m pip install -r requirements/default.txt \ -r requirements/dev.txt \ -r requirements/docs.txt \ -r requirements/pkgutils.txt \ @@ -119,8 +134,7 @@ RUN pyenv exec python3.8 -m pip install \ -r requirements/test-integration.txt \ -r requirements/test-pypy3.txt \ -r requirements/test.txt && \ - pyenv exec python3.11 -m pip install \ - -r requirements/default.txt \ + pyenv exec python3.8 -m pip install -r 
requirements/default.txt \ -r requirements/dev.txt \ -r requirements/docs.txt \ -r requirements/pkgutils.txt \ diff --git a/docker/scripts/install-pyenv.sh b/docker/scripts/install-pyenv.sh index 76a127ed35f..ed63664fbdc 100644 --- a/docker/scripts/install-pyenv.sh +++ b/docker/scripts/install-pyenv.sh @@ -1,13 +1,14 @@ #!/bin/sh # For managing all the local python installations for testing, use pyenv -curl -L https://raw.githubusercontent.com/pyenv/pyenv-installer/master/bin/pyenv-installer | bash +curl -L https://github.com/pyenv/pyenv-installer/raw/master/bin/pyenv-installer | bash # To enable testing versions like 3.4.8 as 3.4 in tox, we need to alias # pyenv python versions git clone https://github.com/s1341/pyenv-alias.git $(pyenv root)/plugins/pyenv-alias # Python versions to test against -VERSION_ALIAS="python3.10" pyenv install 3.10.1 -VERSION_ALIAS="python3.7" pyenv install 3.7.12 -VERSION_ALIAS="python3.8" pyenv install 3.8.12 -VERSION_ALIAS="python3.9" pyenv install 3.9.9 +VERSION_ALIAS="python3.12" pyenv install 3.12.0 +VERSION_ALIAS="python3.11" pyenv install 3.11.6 +VERSION_ALIAS="python3.10" pyenv install 3.10.13 +VERSION_ALIAS="python3.9" pyenv install 3.9.18 +VERSION_ALIAS="python3.8" pyenv install 3.8.18 From 308255ea4ecd3e98fc752c108e52e1af98064156 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 7 Nov 2023 13:53:45 +0200 Subject: [PATCH 0539/1051] lint,mypy,docker-unit-tests -> Python 3.12 (#8617) * Changed linting tox environments to Python 3.12 * Changed docker-unit-tests to use Python 3.12 --- Makefile | 4 ++-- tox.ini | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/Makefile b/Makefile index 858b4fabfdd..e380095c094 100644 --- a/Makefile +++ b/Makefile @@ -184,14 +184,14 @@ docker-lint: .PHONY: docker-unit-tests docker-unit-tests: - @docker-compose -f docker/docker-compose.yml run --rm -w /home/developer/celery celery tox -e 3.11-unit -- $(filter-out $@,$(MAKECMDGOALS)) + @docker-compose -f docker/docker-compose.yml run --rm -w /home/developer/celery celery tox -e 3.12-unit -- $(filter-out $@,$(MAKECMDGOALS)) # Integration tests are not fully supported when running in a docker container yet so we allow them to # gracefully fail until fully supported. # TODO: Add documentation (in help command) when fully supported. 
.PHONY: docker-integration-tests docker-integration-tests: - @docker-compose -f docker/docker-compose.yml run --rm -w /home/developer/celery celery tox -e 3.11-integration-docker -- --maxfail=1000 + @docker-compose -f docker/docker-compose.yml run --rm -w /home/developer/celery celery tox -e 3.12-integration-docker -- --maxfail=1000 .PHONY: docker-bash docker-bash: diff --git a/tox.ini b/tox.ini index 5d2030062e5..806b3d977ee 100644 --- a/tox.ini +++ b/tox.ini @@ -31,7 +31,7 @@ deps= -r{toxinidir}/requirements/pkgutils.txt 3.8,3.9,3.10,3.11,3.12: -r{toxinidir}/requirements/test-ci-default.txt - 3.8,3.9,3.10,3.11: -r{toxinidir}/requirements/docs.txt + 3.8,3.9,3.10,3.11,3.12: -r{toxinidir}/requirements/docs.txt pypy3: -r{toxinidir}/requirements/test-ci-default.txt integration: -r{toxinidir}/requirements/test-integration.txt @@ -85,8 +85,8 @@ basepython = 3.11: python3.11 3.12: python3.12 pypy3: pypy3 - mypy: python3.8 - lint,apicheck,linkcheck,configcheck,bandit: python3.11 + mypy: python3.12 + lint,apicheck,linkcheck,configcheck,bandit: python3.12 usedevelop = True [testenv:mypy] From 8f6679f9137b2336bb6f6aa0528196eab7783574 Mon Sep 17 00:00:00 2001 From: Rob Percival Date: Tue, 7 Nov 2023 11:59:01 +0000 Subject: [PATCH 0540/1051] Correct type of `request` in `task_revoked` documentation (#8616) The documentation erroneously claimed it was a `Request` instance when it is actually a `Context` instance. --- docs/userguide/signals.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/userguide/signals.rst b/docs/userguide/signals.rst index 44684727a9f..7aeea8adbf8 100644 --- a/docs/userguide/signals.rst +++ b/docs/userguide/signals.rst @@ -362,7 +362,7 @@ Provides arguments: * ``request`` - This is a :class:`~celery.worker.request.Request` instance, and not + This is a :class:`~celery.app.task.Context` instance, and not ``task.request``. When using the prefork pool this signal is dispatched in the parent process, so ``task.request`` isn't available and shouldn't be used. Use this object instead, as they share many From ae54d4100f976bf4ed4d12544d8acf80803d9cf3 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Tue, 7 Nov 2023 21:28:25 +0600 Subject: [PATCH 0541/1051] update docs docker image (#8618) --- docker/docs/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/docs/Dockerfile b/docker/docs/Dockerfile index 711380dde61..3005aa5fba5 100644 --- a/docker/docs/Dockerfile +++ b/docker/docs/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.9-slim-bullseye +FROM python:3.12-slim-bookworm ENV PYTHONUNBUFFERED 1 ENV PYTHONDONTWRITEBYTECODE 1 From 372300943e1e6a2adaedae1ad0d11ebd1198bd02 Mon Sep 17 00:00:00 2001 From: Nikita Frolenkov Date: Wed, 8 Nov 2023 13:22:49 +0500 Subject: [PATCH 0542/1051] =?UTF-8?q?Fixed=20RecursionError=20caused=20by?= =?UTF-8?q?=20giving=20`config=5Ffrom=5Fobject`=20nested=20mod=E2=80=A6=20?= =?UTF-8?q?(#8619)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Fixed RecursionError caused by giving `config_from_object` nested module that does not exist (#8517) * Code cleaning: use "raise from", correct ctx. 
manager usage in tests (#8517) --------- Co-authored-by: Nikita Frolenkov --- celery/app/base.py | 9 ++++++++- t/unit/app/test_app.py | 12 ++++++++++++ 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/celery/app/base.py b/celery/app/base.py index 4846a913bf4..75eee027bb7 100644 --- a/celery/app/base.py +++ b/celery/app/base.py @@ -975,7 +975,14 @@ def _finalize_pending_conf(self): This is used by PendingConfiguration: as soon as you access a key the configuration is read. """ - conf = self._conf = self._load_config() + try: + conf = self._conf = self._load_config() + except AttributeError as err: + # AttributeError is not propagated, it is "handled" by + # PendingConfiguration parent class. This causes + # confusing RecursionError. + raise ModuleNotFoundError(*err.args) from err + return conf def _load_config(self): diff --git a/t/unit/app/test_app.py b/t/unit/app/test_app.py index 7aae8f52d74..8f307ebbf0c 100644 --- a/t/unit/app/test_app.py +++ b/t/unit/app/test_app.py @@ -696,6 +696,18 @@ class Config: assert exc.args[0].startswith('task_default_delivery_mode') assert 'CELERY_DEFAULT_DELIVERY_MODE' in exc.args[0] + def test_config_form_object__module_attr_does_not_exist(self): + module_name = __name__ + attr_name = 'bar' + # the module must exist, but it should not have the config attr + self.app.config_from_object(f'{module_name}.{attr_name}') + + with pytest.raises(ModuleNotFoundError) as exc: + assert self.app.conf.broker_url is None + + assert module_name in exc.value.args[0] + assert attr_name in exc.value.args[0] + def test_config_from_cmdline(self): cmdline = ['task_always_eager=no', 'result_backend=/dev/null', From 4e888810f3780f927bc0f23404448769e60bc028 Mon Sep 17 00:00:00 2001 From: kitsuyui Date: Wed, 8 Nov 2023 19:37:18 +0900 Subject: [PATCH 0543/1051] Fix: serialization error when gossip working (#6566) * Fix: serialization error when gossip working Pass accept when Gossip getting consumers. * Define Receiver mock indirectly mocked by consumer mock * This commit adds tests to serialize based on worker settings. If event_serializer, result_serializer, accept_content are set correctly, it works normally. Otherwise, an error is output to the log. Test that. 
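For reference, a minimal sketch of the worker settings this test matrix exercises (the app name and broker URL below are illustrative assumptions, not part of the patch):

```python
from celery import Celery

app = Celery('proj', broker='redis://localhost:6379/0')  # assumed broker URL

# When events are serialized with pickle, every consumer of those events
# (including Gossip) must list pickle in accept_content; otherwise kombu
# refuses to deserialize the payload as disabled content.
app.conf.event_serializer = 'pickle'
app.conf.result_serializer = 'pickle'
app.conf.accept_content = ['pickle', 'json']
```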
--- celery/worker/consumer/gossip.py | 1 + t/integration/test_serialization.py | 54 ++++++++++++++++++++++ t/integration/test_serialization_config.py | 5 ++ t/unit/worker/test_consumer.py | 1 + 4 files changed, 61 insertions(+) create mode 100644 t/integration/test_serialization.py create mode 100644 t/integration/test_serialization_config.py diff --git a/celery/worker/consumer/gossip.py b/celery/worker/consumer/gossip.py index 16e1c2ef6b4..509471cadf4 100644 --- a/celery/worker/consumer/gossip.py +++ b/celery/worker/consumer/gossip.py @@ -176,6 +176,7 @@ def get_consumers(self, channel): channel, queues=[ev.queue], on_message=partial(self.on_message, ev.event_from_message), + accept=ev.accept, no_ack=True )] diff --git a/t/integration/test_serialization.py b/t/integration/test_serialization.py new file mode 100644 index 00000000000..329de792675 --- /dev/null +++ b/t/integration/test_serialization.py @@ -0,0 +1,54 @@ +import os +import subprocess +import time +from concurrent.futures import ThreadPoolExecutor + +disabled_error_message = "Refusing to deserialize disabled content of type " + + +class test_config_serialization: + def test_accept(self, celery_app): + app = celery_app + # Redefine env to use in subprocess + # broker_url and result backend are different for each integration test backend + passenv = { + **os.environ, + "CELERY_BROKER_URL": app.conf.broker_url, + "CELERY_RESULT_BACKEND": app.conf.result_backend, + } + with ThreadPoolExecutor(max_workers=2) as executor: + f1 = executor.submit(get_worker_error_messages, "w1", passenv) + f2 = executor.submit(get_worker_error_messages, "w2", passenv) + time.sleep(3) + log1 = f1.result() + log2 = f2.result() + + for log in [log1, log2]: + assert log.find(disabled_error_message) == -1, log + + +def get_worker_error_messages(name, env): + """run a worker and return its stderr + + :param name: the name of the worker + :param env: the environment to run the worker in + + worker must be running in other process because of avoiding conflict.""" + worker = subprocess.Popen( + [ + "celery", + "--config", + "t.integration.test_serialization_config", + "worker", + "-c", + "2", + "-n", + f"{name}@%%h", + ], + stderr=subprocess.PIPE, + stdout=subprocess.PIPE, + env=env, + ) + worker.terminate() + err = worker.stderr.read().decode("utf-8") + return err diff --git a/t/integration/test_serialization_config.py b/t/integration/test_serialization_config.py new file mode 100644 index 00000000000..a34568e87bc --- /dev/null +++ b/t/integration/test_serialization_config.py @@ -0,0 +1,5 @@ +event_serializer = "pickle" +result_serializer = "pickle" +accept_content = ["pickle", "json"] +worker_redirect_stdouts = False +worker_log_color = False diff --git a/t/unit/worker/test_consumer.py b/t/unit/worker/test_consumer.py index c7e80a0c7de..4a292767136 100644 --- a/t/unit/worker/test_consumer.py +++ b/t/unit/worker/test_consumer.py @@ -696,6 +696,7 @@ def Consumer(self, hostname='foo@x.com', pid=4312): c.app.connection = _amqp_connection() c.hostname = hostname c.pid = pid + c.app.events.Receiver.return_value = Mock(accept=[]) return c def setup_election(self, g, c): From 7d7b9a7b122e70e5f13ce79716bef51c45a09c58 Mon Sep 17 00:00:00 2001 From: Jan <36926112+jakila@users.noreply.github.com> Date: Thu, 9 Nov 2023 21:17:29 +0100 Subject: [PATCH 0544/1051] fix documentation of broker_connection_max_retries update documentation of broker_connection_max_retries as a value of 0 does not mean "retry forever" --- docs/userguide/configuration.rst | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst index 3a8fcdd6a5a..d29cf162b27 100644 --- a/docs/userguide/configuration.rst +++ b/docs/userguide/configuration.rst @@ -2817,7 +2817,7 @@ Default: 100. Maximum number of retries before we give up re-establishing a connection to the AMQP broker. -If this is set to :const:`0` or :const:`None`, we'll retry forever. +If this is set to :const:`None`, we'll retry forever. ``broker_channel_error_retry`` ~~~~~~~~~~~~~~~~~~~~~~~~~~~ From 8aa8b62532e4d46dfdba5279271a27b19b708d73 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Fri, 10 Nov 2023 14:55:26 +0600 Subject: [PATCH 0545/1051] added 2 debian package for better stability in Docker (#8629) * added 2 debian package for better stability * added Dockerfile path --- .github/workflows/docker.yml | 5 +++-- docker/Dockerfile | 2 ++ 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 054fe215089..f5e377433d0 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -1,7 +1,7 @@ name: Docker on: - push: + pull_request: branches: [ 'main'] paths: - '**.py' @@ -9,7 +9,8 @@ on: - '**.toml' - '/docker/**' - '.github/workflows/docker.yml' - pull_request: + - 'Dockerfile' + push: branches: [ 'main'] paths: - '**.py' diff --git a/docker/Dockerfile b/docker/Dockerfile index cf8caa131d7..35b947cc483 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -8,6 +8,8 @@ ARG DEBIAN_FRONTEND=noninteractive # Pypy3 is installed from a package manager because it takes so long to build. RUN apt-get update && apt-get install -y build-essential \ libcurl4-openssl-dev \ + apt-utils \ + debconf \ libffi-dev \ tk-dev \ xz-utils \ From 104c5e1099e966c2b14da08d61d7b91e98dd219a Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Fri, 10 Nov 2023 19:15:33 +0600 Subject: [PATCH 0546/1051] Added changelog for v5.3.5 (#8623) * Added changelog for v5.3.5 * removed pre-commit made changes * Changelog entry for v5.3.5 release --- Changelog.rst | 90 +++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 90 insertions(+) diff --git a/Changelog.rst b/Changelog.rst index 1438bb21b1c..bb146ff4353 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -8,6 +8,96 @@ This document contains change notes for bugfix & new features in the main branch & 5.3.x series, please see :ref:`whatsnew-5.3` for an overview of what's new in Celery 5.3. + +. 
_version-5.3.5: + +5.3.5 +===== + +:release-date: 2023-11-10 7:15 P.M GMT+6 +:release-by: Asif Saif Uddin + + +What's Changed +============== +- Update test.txt versions by @auvipy in https://github.com/celery/celery/pull/8481 +- fix os.getcwd() FileNotFoundError by @mortimer2015 in https://github.com/celery/celery/pull/8448 +- Fix typo in CONTRIBUTING.rst by @monteiro-renato in https://github.com/celery/celery/pull/8494 +- typo(doc): configuration.rst by @shifenhutu in https://github.com/celery/celery/pull/8484 +- assert before raise by @monteiro-renato in https://github.com/celery/celery/pull/8495 +- Update GHA checkout version by @auvipy in https://github.com/celery/celery/pull/8496 +- Fixed replaced_task_nesting by @Nusnus in https://github.com/celery/celery/pull/8500 +- Fix code indentation for route_task() example by @stefmolin in https://github.com/celery/celery/pull/8502 +- support redis 5.x by @dulmandakh in https://github.com/celery/celery/pull/8504 +- Fix typos in test_canvas.py by @monteiro-renato in https://github.com/celery/celery/pull/8498 +- Marked flaky tests by @Nusnus in https://github.com/celery/celery/pull/8508 +- Fix typos in calling.rst by @visitorckw in https://github.com/celery/celery/pull/8506 +- Added support for replaced_task_nesting in chains by @Nusnus in https://github.com/celery/celery/pull/8501 +- Fix typos in canvas.rst by @visitorckw in https://github.com/celery/celery/pull/8509 +- Patch Version Release Checklist by @Nusnus in https://github.com/celery/celery/pull/8488 +- Added Python 3.11 support to Dockerfile by @Nusnus in https://github.com/celery/celery/pull/8511 +- Dependabot (Celery) by @Nusnus in https://github.com/celery/celery/pull/8510 +- Bump actions/checkout from 3 to 4 by @dependabot in https://github.com/celery/celery/pull/8512 +- Update ETA example to include timezone by @amantri in https://github.com/celery/celery/pull/8516 +- Replaces datetime.fromisoformat with the more lenient dateutil parser by @stumpylog in https://github.com/celery/celery/pull/8507 +- Fixed indentation in Dockerfile for Python 3.11 by @Nusnus in https://github.com/celery/celery/pull/8527 +- Fix git bug in Dockerfile by @Nusnus in https://github.com/celery/celery/pull/8528 +- Tox lint upgrade from Python 3.9 to Python 3.11 by @Nusnus in https://github.com/celery/celery/pull/8526 +- Document gevent concurrency by @cunla in https://github.com/celery/celery/pull/8520 +- Update test.txt by @auvipy in https://github.com/celery/celery/pull/8530 +- Celery Docker Upgrades by @Nusnus in https://github.com/celery/celery/pull/8531 +- pyupgrade upgrade v3.11.0 -> v3.13.0 by @Nusnus in https://github.com/celery/celery/pull/8535 +- Update msgpack.txt by @auvipy in https://github.com/celery/celery/pull/8548 +- Update auth.txt by @auvipy in https://github.com/celery/celery/pull/8547 +- Update msgpack.txt to fix build issues by @auvipy in https://github.com/celery/celery/pull/8552 +- Basic ElasticSearch / ElasticClient 8.x Support by @q2justin in https://github.com/celery/celery/pull/8519 +- Fix eager tasks does not populate name field by @KOliver94 in https://github.com/celery/celery/pull/8486 +- Fix typo in celery.app.control by @Spaceface16518 in https://github.com/celery/celery/pull/8563 +- Update solar.txt ephem by @auvipy in https://github.com/celery/celery/pull/8566 +- Update test.txt pytest-timeout by @auvipy in https://github.com/celery/celery/pull/8565 +- Correct some mypy errors by @rbtcollins in https://github.com/celery/celery/pull/8570 +- Update elasticsearch.txt by 
@auvipy in https://github.com/celery/celery/pull/8573 +- Update test.txt deps by @auvipy in https://github.com/celery/celery/pull/8574 +- Update test.txt by @auvipy in https://github.com/celery/celery/pull/8590 +- Improved the "Next steps" documentation (#8561). by @frolenkov-nikita in https://github.com/celery/celery/pull/8600 +- Disabled couchbase tests due to broken package breaking main by @Nusnus in https://github.com/celery/celery/pull/8602 +- Update elasticsearch deps by @auvipy in https://github.com/celery/celery/pull/8605 +- Update cryptography==41.0.5 by @auvipy in https://github.com/celery/celery/pull/8604 +- Update pytest==7.4.3 by @auvipy in https://github.com/celery/celery/pull/8606 +- test initial support of python 3.12.x by @auvipy in https://github.com/celery/celery/pull/8549 +- updated new versions to fix CI by @auvipy in https://github.com/celery/celery/pull/8607 +- Update zstd.txt by @auvipy in https://github.com/celery/celery/pull/8609 +- Fixed CI Support with Python 3.12 by @Nusnus in https://github.com/celery/celery/pull/8611 +- updated CI, docs and classifier for next release by @auvipy in https://github.com/celery/celery/pull/8613 +- updated dockerfile to add python 3.12 by @auvipy in https://github.com/celery/celery/pull/8614 +- lint,mypy,docker-unit-tests -> Python 3.12 by @Nusnus in https://github.com/celery/celery/pull/8617 +- Correct type of `request` in `task_revoked` documentation by @RJPercival in https://github.com/celery/celery/pull/8616 +- update docs docker image by @auvipy in https://github.com/celery/celery/pull/8618 +- Fixed RecursionError caused by giving `config_from_object` nested mod… by @frolenkov-nikita in https://github.com/celery/celery/pull/8619 +- Fix: serialization error when gossip working by @kitsuyui in https://github.com/celery/celery/pull/6566 +* [documentation] broker_connection_max_retries of 0 does not mean "retry forever" by @jakila in https://github.com/celery/celery/pull/8626 +- added 2 debian package for better stability in Docker by @auvipy in https://github.com/celery/celery/pull/8629 + + +New Contributors +================ +- @mortimer2015 made their first contribution in https://github.com/celery/celery/pull/8448 +- @monteiro-renato made their first contribution in https://github.com/celery/celery/pull/8494 +- @shifenhutu made their first contribution in https://github.com/celery/celery/pull/8484 +- @stefmolin made their first contribution in https://github.com/celery/celery/pull/8502 +- @visitorckw made their first contribution in https://github.com/celery/celery/pull/8506 +- @dependabot made their first contribution in https://github.com/celery/celery/pull/8512 +- @amantri made their first contribution in https://github.com/celery/celery/pull/8516 +- @cunla made their first contribution in https://github.com/celery/celery/pull/8520 +- @q2justin made their first contribution in https://github.com/celery/celery/pull/8519 +- @Spaceface16518 made their first contribution in https://github.com/celery/celery/pull/8563 +- @rbtcollins made their first contribution in https://github.com/celery/celery/pull/8570 +- @frolenkov-nikita made their first contribution in https://github.com/celery/celery/pull/8600 +- @RJPercival made their first contribution in https://github.com/celery/celery/pull/8616 +- @kitsuyui made their first contribution in https://github.com/celery/celery/pull/6566 +- @jakila made their first contribution in https://github.com/celery/celery/pull/8626 + + .. 
_version-5.3.4: 5.3.4 From 8e5efc25a784cf09ca9754dc2bc7002b2dcd989d Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Fri, 10 Nov 2023 19:19:29 +0600 Subject: [PATCH 0547/1051] =?UTF-8?q?Bump=20version:=205.3.4=20=E2=86=92?= =?UTF-8?q?=205.3.5?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.cfg | 2 +- README.rst | 2 +- celery/__init__.py | 2 +- docs/includes/introduction.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 18353538fa5..063b4d708ec 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 5.3.4 +current_version = 5.3.5 commit = True tag = True parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P[a-z\d]+)? diff --git a/README.rst b/README.rst index 39be594a9bd..ffed765dd5b 100644 --- a/README.rst +++ b/README.rst @@ -2,7 +2,7 @@ |build-status| |coverage| |license| |wheel| |semgrep| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge| -:Version: 5.3.4 (emerald-rush) +:Version: 5.3.5 (emerald-rush) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ diff --git a/celery/__init__.py b/celery/__init__.py index e11a18c7b7e..582f64e97a0 100644 --- a/celery/__init__.py +++ b/celery/__init__.py @@ -17,7 +17,7 @@ SERIES = 'emerald-rush' -__version__ = '5.3.4' +__version__ = '5.3.5' __author__ = 'Ask Solem' __contact__ = 'auvipy@gmail.com' __homepage__ = 'https://docs.celeryq.dev/' diff --git a/docs/includes/introduction.txt b/docs/includes/introduction.txt index 6ce97bb020e..91c3561a127 100644 --- a/docs/includes/introduction.txt +++ b/docs/includes/introduction.txt @@ -1,4 +1,4 @@ -:Version: 5.3.4 (emerald-rush) +:Version: 5.3.5 (emerald-rush) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ From 40152f0771fc0ae31ea50e44f5b6b5da5667ccb4 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Sun, 12 Nov 2023 11:09:45 +0600 Subject: [PATCH 0548/1051] Update Minor-Version-Release-Checklist.md (#8624) * Update Minor-Version-Release-Checklist.md * Update .github/ISSUE_TEMPLATE/Minor-Version-Release-Checklist.md * Update .github/ISSUE_TEMPLATE/Minor-Version-Release-Checklist.md * Update .github/ISSUE_TEMPLATE/Minor-Version-Release-Checklist.md * Update Minor-Version-Release-Checklist.md --- .../Minor-Version-Release-Checklist.md | 131 ++++++++++++++++-- 1 file changed, 121 insertions(+), 10 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/Minor-Version-Release-Checklist.md b/.github/ISSUE_TEMPLATE/Minor-Version-Release-Checklist.md index a6343b27bbc..63e91a5d87c 100644 --- a/.github/ISSUE_TEMPLATE/Minor-Version-Release-Checklist.md +++ b/.github/ISSUE_TEMPLATE/Minor-Version-Release-Checklist.md @@ -7,19 +7,130 @@ assignees: '' --- -Version: -Release PR: +# Minor Release Overview: v -# Checklist +This issue will summarize the status and discussion in preparation for the new release. It will be used to track the progress of the release and to ensure that all the necessary steps are taken. It will serve as a checklist for the release and will be used to communicate the status of the release to the community. -- [ ] Release PR drafted -- [ ] Release PR reviewed -- [ ] The main branch build passes +> ⚠️ **Warning:** The release checklist is a living document. It will be updated as the release progresses. 
Please check back often to ensure that you are up to date with the latest information. - [![Build Status](https://github.com/celery/celery/actions/workflows/python-package.yml/badge.svg)](https://github.com/celery/celery/actions/workflows/python-package.yml) -- [ ] Release Notes -- [ ] What's New +## Checklist +- [ ] Codebase Stability +- [ ] Breaking Changes Validation +- [ ] Compile Changelog +- [ ] Release +- [ ] Release Announcement + +# Release Details +The release manager is responsible for completing the release end-to-end ensuring that all the necessary steps are taken and that the release is completed in a timely manner. This is usually the owner of the release issue but may be assigned to a different maintainer if necessary. + +- Release Manager: +- Release Date: +- Release Branch: `main` -# Release Blockers +# Release Steps +The release manager is expected to execute the checklist below. The release manager is also responsible for ensuring that the checklist is updated as the release progresses. Any changes or issues should be communicated under this issue for centralized tracking. # Potential Release Blockers + +## 1. Codebase Stability +- [ ] The `main` branch build passes + + [![Build Status](https://github.com/celery/celery/actions/workflows/python-package.yml/badge.svg)](https://github.com/celery/celery/actions/workflows/python-package.yml) + +## 2. Breaking Changes Validation +A patch release should not contain any breaking changes. The release manager is responsible for reviewing all of the merged PRs since the last release to ensure that there are no breaking changes. If there are any breaking changes, the release manager should discuss with the maintainers to determine the best course of action if an obvious solution is not apparent. + +## 3. Compile Changelog +The release changelog is set in two different places: +1. The [Changelog.rst](https://github.com/celery/celery/blob/main/Changelog.rst) that uses the RST format. +2. The GitHub Release auto-generated changelog that uses the Markdown format. This is auto-generated by the GitHub Draft Release UI. + +> ⚠️ **Warning:** The pre-commit changes should not be included in the changelog. + +To generate the changelog automatically, [draft a new release](https://github.com/celery/celery/releases/new) on GitHub using a fake new version tag for the automatic changelog generation. Notice the actual tag creation is done **on publish** so we can use that to generate the changelog and then delete the draft release without publishing it thus avoiding creating a new tag. + +- Create a new tag +CleanShot 2023-09-05 at 22 06 24@2x + +- Generate Markdown release notes +CleanShot 2023-09-05 at 22 13 39@2x + +- Copy the generated release notes. + +- Delete the draft release without publishing it. + +### 3.1 Changelog.rst +Once you have the actual changes, you need to convert it to rst format and add it to the [Changelog.rst](https://github.com/celery/celery/blob/main/Changelog.rst) file. The new version block needs to follow the following format: +```rst +.. _version-x.y.z: + +x.y.z +===== + +:release-date: YYYY-MM-DD HH:MM P.M/A.M TimeZone +:release-by: Release Manager Name + +Changes list in RST format. +``` + +These changes will reflect in the [Change history](https://docs.celeryq.dev/en/stable/changelog.html) section of the documentation. + +### 3.2 Changelog PR +The changes to the [Changelog.rst](https://github.com/celery/celery/blob/main/Changelog.rst) file should be submitted as a PR. 
This PR should be the last merged PR before the release. + +## 4. Release +### 4.1 Prepare releasing environment +Before moving forward with the release, the release manager should ensure that bumpversion and twine are installed. These are required to publish the release. + +### 4.2 Bump version +The release manager should bump the version using the following command: +```bash +bumpversion patch +``` +The changes should be pushed directly to main by the release manager. + +At this point, the git log should appear somewhat similar to this: +``` +commit XXX (HEAD -> main, tag: vX.Y.Z, upstream/main, origin/main) +Author: Release Manager +Date: YYY + + Bump version: a.b.c → x.y.z + +commit XXX +Author: Release Manager +Date: YYY + + Added changelog for vX.Y.Z (#1234) +``` +If everything looks good, the bump version commit can be directly pushed to `main`: +```bash +git push origin main --tags +``` + +### 4.3 Publish release to PyPI +The release manager should publish the release to PyPI using the following commands running under the root directory of the repository: +```bash +python setup.py clean build sdist bdist_wheel +``` +If the build is successful, the release manager should publish the release to PyPI using the following command: +```bash +twine upload dist/celery-X.Y.Z* +``` + +> ⚠️ **Warning:** The release manager should double check that the release details are correct (project/version) before publishing the release to PyPI. + +> ⚠️ **Critical Reminder:** Should the released package prove to be faulty or need retraction for any reason, do not delete it from PyPI. The appropriate course of action is to "yank" the release. + +## Release Announcement +After the release is published, the release manager should create a new GitHub Release and set it as the latest release. + +CleanShot 2023-09-05 at 22 51 24@2x + +### Add Release Notes +On a per-case basis, the release manager may also attach an additional release note to the auto-generated release notes. This is usually done when there are important changes that are not reflected in the auto-generated release notes. + +### OpenCollective Update +After successfully publishing the new release, the release manager is responsible for announcing it on the project's OpenCollective [page](https://opencollective.com/celery/updates). This is to engage with the community and keep backers and sponsors in the loop. 
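As a post-publish sanity check, something along these lines can be run (a minimal sketch; replace X.Y.Z with the released version):

```bash
# Install the freshly published package into a throwaway environment
# and confirm the advertised version matches the new tag.
python -m venv /tmp/celery-release-check
. /tmp/celery-release-check/bin/activate
pip install "celery==X.Y.Z"
python -c "import celery; print(celery.__version__)"
```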
+ + From 2ec20356204779ff60c289919880b7b115e8d6b3 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 13 Nov 2023 14:55:55 +0200 Subject: [PATCH 0549/1051] Increased docker-build CI job timeout from 30m -> 60m (#8635) --- .github/workflows/docker.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index f5e377433d0..6b2c67ca5a4 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -23,7 +23,7 @@ on: jobs: docker-build: runs-on: ubuntu-latest - timeout-minutes: 30 + timeout-minutes: 60 steps: - uses: actions/checkout@v4 - name: Build Docker container From 7326690383afccc15a2c43b6b0d3f78ed2f7cd8f Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 13 Nov 2023 21:36:52 +0200 Subject: [PATCH 0550/1051] [pre-commit.ci] pre-commit autoupdate (#8639) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/mirrors-mypy: v1.6.1 → v1.7.0](https://github.com/pre-commit/mirrors-mypy/compare/v1.6.1...v1.7.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 07450537784..829cf0258b0 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -30,7 +30,7 @@ repos: - id: isort - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.6.1 + rev: v1.7.0 hooks: - id: mypy pass_filenames: false From bad275039fac8bdf66e8d03928028227aef0f782 Mon Sep 17 00:00:00 2001 From: Asday Date: Sat, 18 Nov 2023 10:13:07 +0000 Subject: [PATCH 0551/1051] Incredibly minor spelling fix. (#8649) --- docs/getting-started/backends-and-brokers/index.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/getting-started/backends-and-brokers/index.rst b/docs/getting-started/backends-and-brokers/index.rst index 5cb8c899363..92daf812204 100644 --- a/docs/getting-started/backends-and-brokers/index.rst +++ b/docs/getting-started/backends-and-brokers/index.rst @@ -98,6 +98,6 @@ SQLAlchemy SQLAlchemy is a backend. -It allows Celery to interface with MySQL, PostgreSQL, SQlite, and more. It is a ORM, and is the way Celery can use a SQL DB as a result backend. +It allows Celery to interface with MySQL, PostgreSQL, SQlite, and more. It is an ORM, and is the way Celery can use a SQL DB as a result backend. 
-:ref:`See documentation for details ` \ No newline at end of file +:ref:`See documentation for details ` From 709c5e7b1b6d916e42af17037f841425947b138c Mon Sep 17 00:00:00 2001 From: lyzlisa <34400837+lyzlisa@users.noreply.github.com> Date: Sun, 19 Nov 2023 05:19:06 -0600 Subject: [PATCH 0552/1051] Fix non-zero exit code when receiving remote shutdown (#8650) --- celery/worker/control.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/celery/worker/control.py b/celery/worker/control.py index 41d059e4116..8cbd92cbd0e 100644 --- a/celery/worker/control.py +++ b/celery/worker/control.py @@ -580,7 +580,7 @@ def autoscale(state, max=None, min=None): def shutdown(state, msg='Got shutdown from remote', **kwargs): """Shutdown worker(s).""" logger.warning(msg) - raise WorkerShutdown(msg) + raise WorkerShutdown(0) # -- Queues From ca1dfbdc2006bc64330bd87e20bd5c0f3e7e51a5 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Sun, 19 Nov 2023 17:24:20 +0600 Subject: [PATCH 0553/1051] Update task.py get_custom_headers missing 'compression' key (#8633) --- celery/app/task.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/celery/app/task.py b/celery/app/task.py index a23254d3a26..5d55a747b8c 100644 --- a/celery/app/task.py +++ b/celery/app/task.py @@ -104,7 +104,7 @@ def __init__(self, *args, **kwargs): def _get_custom_headers(self, *args, **kwargs): headers = {} headers.update(*args, **kwargs) - celery_keys = {*Context.__dict__.keys(), 'lang', 'task', 'argsrepr', 'kwargsrepr'} + celery_keys = {*Context.__dict__.keys(), 'lang', 'task', 'argsrepr', 'kwargsrepr', 'compression'} for key in celery_keys: headers.pop(key, None) if not headers: From d29afbadffa3081a601f367defa1864ddae9076a Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Sun, 19 Nov 2023 18:28:55 +0600 Subject: [PATCH 0554/1051] Update kombu>=5.3.4 to fix SQS request compatibility (#8646) --- requirements/default.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/default.txt b/requirements/default.txt index a26d814df68..02918bd1eff 100644 --- a/requirements/default.txt +++ b/requirements/default.txt @@ -1,5 +1,5 @@ billiard>=4.2.0,<5.0 -kombu>=5.3.3,<6.0 +kombu>=5.3.4,<6.0 vine>=5.1.0,<6.0 click>=8.1.2,<9.0 click-didyoumean>=0.3.0 From 269fa2103812f0bdff3ba298cfff3691093503f5 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Tue, 21 Nov 2023 11:03:41 +0600 Subject: [PATCH 0555/1051] test requirements version update (#8655) --- requirements/test.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/test.txt b/requirements/test.txt index 30203095421..90c9f2fdbfb 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -3,11 +3,11 @@ pytest-celery==0.0.0 pytest-subtests==0.11.0 pytest-timeout==2.2.0 pytest-click==1.1.0 -pytest-order==1.1.0 +pytest-order==1.2.0 boto3>=1.26.143 moto>=4.1.11 # typing extensions -mypy==1.6.1; platform_python_implementation=="CPython" +mypy==1.7.0; platform_python_implementation=="CPython" pre-commit==3.5.0 -r extras/yaml.txt -r extras/msgpack.txt From aaec27a410e9bd147c83e2de5c5e809e6d4f8a94 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Tue, 21 Nov 2023 11:04:57 +0600 Subject: [PATCH 0556/1051] Update elasticsearch version (#8656) --- requirements/extras/elasticsearch.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/elasticsearch.txt b/requirements/extras/elasticsearch.txt index c2238c8cd8e..50764cdfb64 100644 --- a/requirements/extras/elasticsearch.txt 
+++ b/requirements/extras/elasticsearch.txt @@ -1,2 +1,2 @@ -elasticsearch<=8.10.1 +elasticsearch<=8.11.0 elastic-transport<=8.10.0 From 3ba50e4e153dabe22ff697c50bbb672c36c691e6 Mon Sep 17 00:00:00 2001 From: John Whitman Date: Tue, 21 Nov 2023 03:00:19 -0500 Subject: [PATCH 0557/1051] Propagates more ImportErrors during autodiscovery (#8632) * Refactors find_related_module tests. * Narrows exception catching. * Makes a narrower assertion. * Cleans up test name. * Tries to address coverage miss. * Cleans up comment. * Fixes typo. * Adds integration test. * Fixes bug on ModuleNotFoundError.name when fails early. * Defaults getattr to None. --- celery/loaders/base.py | 18 ++++++--- t/integration/test_loader.py | 22 +++++++++++ t/unit/app/test_loaders.py | 77 ++++++++++++++++++++++++++++++------ 3 files changed, 100 insertions(+), 17 deletions(-) create mode 100644 t/integration/test_loader.py diff --git a/celery/loaders/base.py b/celery/loaders/base.py index aa7139c78af..f3220882401 100644 --- a/celery/loaders/base.py +++ b/celery/loaders/base.py @@ -253,10 +253,12 @@ def find_related_module(package, related_name): # Django 1.7 allows for specifying a class name in INSTALLED_APPS. # (Issue #2248). try: + # Return package itself when no related_name. module = importlib.import_module(package) if not related_name and module: return module - except ImportError: + except ModuleNotFoundError: + # On import error, try to walk package up one level. package, _, _ = package.rpartition('.') if not package: raise @@ -264,9 +266,13 @@ def find_related_module(package, related_name): module_name = f'{package}.{related_name}' try: + # Try to find related_name under package. return importlib.import_module(module_name) - except ImportError as e: - import_exc_name = getattr(e, 'name', module_name) - if import_exc_name is not None and import_exc_name != module_name: - raise e - return + except ModuleNotFoundError as e: + import_exc_name = getattr(e, 'name', None) + # If candidate does not exist, then return None. + if import_exc_name and module_name.startswith(import_exc_name): + return + + # Otherwise, raise because error probably originated from a nested import. 
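# (Illustration, not part of the patch: if foo/tasks.py itself runs
# "import missing_dep", the ModuleNotFoundError's .name is "missing_dep"
# rather than "foo.tasks", so the guard above does not match and the
# error is re-raised to the caller.)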
+ raise e diff --git a/t/integration/test_loader.py b/t/integration/test_loader.py new file mode 100644 index 00000000000..bc5874227d3 --- /dev/null +++ b/t/integration/test_loader.py @@ -0,0 +1,22 @@ +from celery import shared_task + + +@shared_task() +def dummy_task(x, y): + return x + y + + +class test_loader: + def test_autodiscovery(self, manager): + # Arrange + expected_package_name, _, module_name = __name__.rpartition('.') + unexpected_package_name = 'nonexistent.package.name' + + # Act + manager.app.autodiscover_tasks([expected_package_name, unexpected_package_name], module_name, force=True) + + # Assert + assert f'{expected_package_name}.{module_name}.dummy_task' in manager.app.tasks + assert not any( + task.startswith(unexpected_package_name) for task in manager.app.tasks + ) diff --git a/t/unit/app/test_loaders.py b/t/unit/app/test_loaders.py index 879887ebe9e..213c15b8a19 100644 --- a/t/unit/app/test_loaders.py +++ b/t/unit/app/test_loaders.py @@ -234,19 +234,74 @@ def test_autodiscover_tasks(self): base.autodiscover_tasks(['foo']) frm.assert_called() - def test_find_related_module(self): + # Happy - get something back + def test_find_related_module__when_existent_package_alone(self): with patch('importlib.import_module') as imp: imp.return_value = Mock() imp.return_value.__path__ = 'foo' - assert base.find_related_module('bar', 'tasks').__path__ == 'foo' - imp.assert_any_call('bar') - imp.assert_any_call('bar.tasks') + assert base.find_related_module('foo', None).__path__ == 'foo' + imp.assert_called_once_with('foo') - imp.reset_mock() - assert base.find_related_module('bar', None).__path__ == 'foo' - imp.assert_called_once_with('bar') + def test_find_related_module__when_existent_package_and_related_name(self): + with patch('importlib.import_module') as imp: + first_import = Mock() + first_import.__path__ = 'foo' + second_import = Mock() + second_import.__path__ = 'foo/tasks' + imp.side_effect = [first_import, second_import] + assert base.find_related_module('foo', 'tasks').__path__ == 'foo/tasks' + imp.assert_any_call('foo') + imp.assert_any_call('foo.tasks') + + def test_find_related_module__when_existent_package_parent_and_related_name(self): + with patch('importlib.import_module') as imp: + first_import = ModuleNotFoundError(name='foo.BarApp') # Ref issue #2248 + second_import = Mock() + second_import.__path__ = 'foo/tasks' + imp.side_effect = [first_import, second_import] + assert base.find_related_module('foo.BarApp', 'tasks').__path__ == 'foo/tasks' + imp.assert_any_call('foo.BarApp') + imp.assert_any_call('foo.tasks') + + # Sad - nothing returned + def test_find_related_module__when_package_exists_but_related_name_does_not(self): + with patch('importlib.import_module') as imp: + first_import = Mock() + first_import.__path__ = 'foo' + second_import = ModuleNotFoundError(name='foo.tasks') + imp.side_effect = [first_import, second_import] + assert base.find_related_module('foo', 'tasks') is None + imp.assert_any_call('foo') + imp.assert_any_call('foo.tasks') + + def test_find_related_module__when_existent_package_parent_but_no_related_name(self): + with patch('importlib.import_module') as imp: + first_import = ModuleNotFoundError(name='foo.bar') + second_import = ModuleNotFoundError(name='foo.tasks') + imp.side_effect = [first_import, second_import] + assert base.find_related_module('foo.bar', 'tasks') is None + imp.assert_any_call('foo.bar') + imp.assert_any_call('foo.tasks') + + # Sad - errors + def test_find_related_module__when_no_package_parent(self): + with 
patch('importlib.import_module') as imp: + non_existent_import = ModuleNotFoundError(name='foo') + imp.side_effect = non_existent_import + with pytest.raises(ModuleNotFoundError) as exc: + base.find_related_module('foo', 'tasks') - imp.side_effect = ImportError() - with pytest.raises(ImportError): - base.find_related_module('bar', 'tasks') - assert base.find_related_module('bar.foo', 'tasks') is None + assert exc.value.name == 'foo' + imp.assert_called_once_with('foo') + + def test_find_related_module__when_nested_import_missing(self): + expected_error = 'dummy import error - e.g. missing nested package' + with patch('importlib.import_module') as imp: + first_import = Mock() + first_import.__path__ = 'foo' + second_import = ModuleNotFoundError(expected_error) + imp.side_effect = [first_import, second_import] + with pytest.raises(ModuleNotFoundError) as exc: + base.find_related_module('foo', 'tasks') + + assert exc.value.msg == expected_error From 9bcc6a90760e8d0c0427ad0a966b61aecee9f72f Mon Sep 17 00:00:00 2001 From: John Whitman Date: Wed, 22 Nov 2023 03:54:44 -0500 Subject: [PATCH 0558/1051] Re-raise ModuleNotFoundError unless for guessed task (#8660) * Modifies integration test to catch actual bad imports. * Only return none when guessed task-module missing. * Cleans up test. * Adds assertion text. --- celery/loaders/base.py | 2 +- t/integration/test_loader.py | 22 +++++++++++++++++++--- 2 files changed, 20 insertions(+), 4 deletions(-) diff --git a/celery/loaders/base.py b/celery/loaders/base.py index f3220882401..8ac3e5b50e9 100644 --- a/celery/loaders/base.py +++ b/celery/loaders/base.py @@ -271,7 +271,7 @@ def find_related_module(package, related_name): except ModuleNotFoundError as e: import_exc_name = getattr(e, 'name', None) # If candidate does not exist, then return None. - if import_exc_name and module_name.startswith(import_exc_name): + if import_exc_name and module_name == import_exc_name: return # Otherwise, raise because error probably originated from a nested import. diff --git a/t/integration/test_loader.py b/t/integration/test_loader.py index bc5874227d3..a98aa2e85d6 100644 --- a/t/integration/test_loader.py +++ b/t/integration/test_loader.py @@ -1,3 +1,5 @@ +import pytest + from celery import shared_task @@ -7,10 +9,10 @@ def dummy_task(x, y): class test_loader: - def test_autodiscovery(self, manager): + def test_autodiscovery__when_packages_exist(self, manager): # Arrange expected_package_name, _, module_name = __name__.rpartition('.') - unexpected_package_name = 'nonexistent.package.name' + unexpected_package_name = 'datetime.datetime' # Act manager.app.autodiscover_tasks([expected_package_name, unexpected_package_name], module_name, force=True) @@ -19,4 +21,18 @@ def test_autodiscovery(self, manager): assert f'{expected_package_name}.{module_name}.dummy_task' in manager.app.tasks assert not any( task.startswith(unexpected_package_name) for task in manager.app.tasks - ) + ), 'Expected datetime.datetime to neither have test_loader module nor define a Celery task.' 
+ + def test_autodiscovery__when_packages_do_not_exist(self, manager): + # Arrange + existent_package_name, _, module_name = __name__.rpartition('.') + nonexistent_package_name = 'nonexistent.package.name' + + # Act + with pytest.raises(ModuleNotFoundError) as exc: + manager.app.autodiscover_tasks( + [existent_package_name, nonexistent_package_name], module_name, force=True + ) + + # Assert + assert nonexistent_package_name.startswith(exc.value.name), 'Expected to fail on importing "nonexistent"' From 9159e850ecff62e96d69aa30d04f447c40d6d765 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Wed, 22 Nov 2023 20:54:58 +0600 Subject: [PATCH 0559/1051] Added changelog for v5.3.6 release (#8659) * Added changelog for v5.3.6 release * Added changelog for v5.3.6 release * Added changelog for v5.3.6 release * Update Changelog.rst --- Changelog.rst | 32 +++++++++++++++++++++++++++++++- 1 file changed, 31 insertions(+), 1 deletion(-) diff --git a/Changelog.rst b/Changelog.rst index bb146ff4353..6904989625a 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -9,7 +9,37 @@ in the main branch & 5.3.x series, please see :ref:`whatsnew-5.3` for an overview of what's new in Celery 5.3. -. _version-5.3.5: +.. _version-5.3.6: + +5.3.6 +===== + +:release-date: 2023-11-22 9:15 P.M GMT+6 +:release-by: Asif Saif Uddin + + +This release is focused mainly to fix AWS SQS new feature comatibility issue and old regressions. +The code changes are mostly fix for regressions. More details can be found below. + +What's Changed +============== +- Increased docker-build CI job timeout from 30m -> 60m by @Nusnus in https://github.com/celery/celery/pull/8635 +- Incredibly minor spelling fix. by @Asday in https://github.com/celery/celery/pull/8649 +- Fix non-zero exit code when receiving remote shutdown by @lyzlisa in https://github.com/celery/celery/pull/8650 +- Update task.py get_custom_headers missing 'compression' key by @auvipy in https://github.com/celery/celery/pull/8633 +- Update kombu>=5.3.4 to fix SQS request compatibility with boto JSON serializer by @auvipy in https://github.com/celery/celery/pull/8646 +- test requirements version update by @auvipy in https://github.com/celery/celery/pull/8655 +- Update elasticsearch version by @auvipy in https://github.com/celery/celery/pull/8656 +- Propagates more ImportErrors during autodiscovery by @johnjameswhitman in https://github.com/celery/celery/pull/8632 + +New Contributors +================ +- @Asday made their first contribution in https://github.com/celery/celery/pull/8649 +- @lyzlisa made their first contribution in https://github.com/celery/celery/pull/8650 +- @johnjameswhitman made their first contribution in https://github.com/celery/celery/pull/8632 + + +.. _version-5.3.5: 5.3.5 ===== From b8c67a7a9cc1dfd30b292b4cac955bc8bf7e703f Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Wed, 22 Nov 2023 21:12:01 +0600 Subject: [PATCH 0560/1051] =?UTF-8?q?Bump=20version:=205.3.5=20=E2=86=92?= =?UTF-8?q?=205.3.6?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.cfg | 2 +- README.rst | 2 +- celery/__init__.py | 2 +- docs/includes/introduction.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 063b4d708ec..412d6ea69b4 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 5.3.5 +current_version = 5.3.6 commit = True tag = True parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P[a-z\d]+)? 
diff --git a/README.rst b/README.rst index ffed765dd5b..e0c8ab4abfd 100644 --- a/README.rst +++ b/README.rst @@ -2,7 +2,7 @@ |build-status| |coverage| |license| |wheel| |semgrep| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge| -:Version: 5.3.5 (emerald-rush) +:Version: 5.3.6 (emerald-rush) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ diff --git a/celery/__init__.py b/celery/__init__.py index 582f64e97a0..c60dbd4fe58 100644 --- a/celery/__init__.py +++ b/celery/__init__.py @@ -17,7 +17,7 @@ SERIES = 'emerald-rush' -__version__ = '5.3.5' +__version__ = '5.3.6' __author__ = 'Ask Solem' __contact__ = 'auvipy@gmail.com' __homepage__ = 'https://docs.celeryq.dev/' diff --git a/docs/includes/introduction.txt b/docs/includes/introduction.txt index 91c3561a127..79eb36eeb34 100644 --- a/docs/includes/introduction.txt +++ b/docs/includes/introduction.txt @@ -1,4 +1,4 @@ -:Version: 5.3.5 (emerald-rush) +:Version: 5.3.6 (emerald-rush) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ From 26a7831d1ff3dc2a35978b397df0ca15f4938e14 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 23 Nov 2023 10:22:57 +0200 Subject: [PATCH 0561/1051] New Config: worker_enable_prefetch_count_reduction (#8581) * Added new config: worker_enable_prefetch_count_reduction * Added documentation in userguide * Added unit test: test_restore_prefetch_count_after_connection_restart_negative() * Update celery/worker/consumer/consumer.py Co-authored-by: Omer Katz * PR Fixes --------- Co-authored-by: Omer Katz --- celery/app/defaults.py | 1 + celery/worker/consumer/consumer.py | 30 +++++++++++++---------- docs/userguide/configuration.rst | 39 ++++++++++++++++++++++++++++++ docs/userguide/workers.rst | 3 +++ t/unit/worker/test_consumer.py | 32 ++++++++++++++++++++---- 5 files changed, 87 insertions(+), 18 deletions(-) diff --git a/celery/app/defaults.py b/celery/app/defaults.py index a9f68689940..2d357134126 100644 --- a/celery/app/defaults.py +++ b/celery/app/defaults.py @@ -325,6 +325,7 @@ def __repr__(self): pool_restarts=Option(False, type='bool'), proc_alive_timeout=Option(4.0, type='float'), prefetch_multiplier=Option(4, type='int'), + enable_prefetch_count_reduction=Option(True, type='bool'), redirect_stdouts=Option( True, type='bool', old={'celery_redirect_stdouts'}, ), diff --git a/celery/worker/consumer/consumer.py b/celery/worker/consumer/consumer.py index e072ef57870..cae0b5446ea 100644 --- a/celery/worker/consumer/consumer.py +++ b/celery/worker/consumer/consumer.py @@ -390,20 +390,21 @@ def on_connection_error_after_connected(self, exc): else: warnings.warn(CANCEL_TASKS_BY_DEFAULT, CPendingDeprecationWarning) - self.initial_prefetch_count = max( - self.prefetch_multiplier, - self.max_prefetch_count - len(tuple(active_requests)) * self.prefetch_multiplier - ) - - self._maximum_prefetch_restored = self.initial_prefetch_count == self.max_prefetch_count - if not self._maximum_prefetch_restored: - logger.info( - f"Temporarily reducing the prefetch count to {self.initial_prefetch_count} to avoid over-fetching " - f"since {len(tuple(active_requests))} tasks are currently being processed.\n" - f"The prefetch count will be gradually restored to {self.max_prefetch_count} as the tasks " - "complete processing." 
+ if self.app.conf.worker_enable_prefetch_count_reduction: + self.initial_prefetch_count = max( + self.prefetch_multiplier, + self.max_prefetch_count - len(tuple(active_requests)) * self.prefetch_multiplier ) + self._maximum_prefetch_restored = self.initial_prefetch_count == self.max_prefetch_count + if not self._maximum_prefetch_restored: + logger.info( + f"Temporarily reducing the prefetch count to {self.initial_prefetch_count} to avoid " + f"over-fetching since {len(tuple(active_requests))} tasks are currently being processed.\n" + f"The prefetch count will be gradually restored to {self.max_prefetch_count} as the tasks " + "complete processing." + ) + def register_with_event_loop(self, hub): self.blueprint.send_all( self, 'register_with_event_loop', args=(hub,), @@ -696,7 +697,10 @@ def on_task_received(message): def _restore_prefetch_count_after_connection_restart(self, p, *args): with self.qos._mutex: - if self._maximum_prefetch_restored: + if any(( + not self.app.conf.worker_enable_prefetch_count_reduction, + self._maximum_prefetch_restored, + )): return new_prefetch_count = min(self.max_prefetch_count, self._new_prefetch_count) diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst index d29cf162b27..66a4ee71606 100644 --- a/docs/userguide/configuration.rst +++ b/docs/userguide/configuration.rst @@ -168,6 +168,7 @@ have been moved into a new ``task_`` prefix. ``CELERYD_POOL_PUTLOCKS`` :setting:`worker_pool_putlocks` ``CELERYD_POOL_RESTARTS`` :setting:`worker_pool_restarts` ``CELERYD_PREFETCH_MULTIPLIER`` :setting:`worker_prefetch_multiplier` +``CELERYD_ENABLE_PREFETCH_COUNT_REDUCTION``:setting:`worker_enable_prefetch_count_reduction` ``CELERYD_REDIRECT_STDOUTS`` :setting:`worker_redirect_stdouts` ``CELERYD_REDIRECT_STDOUTS_LEVEL`` :setting:`worker_redirect_stdouts_level` ``CELERY_SEND_EVENTS`` :setting:`worker_send_task_events` @@ -2969,6 +2970,44 @@ For more on prefetching, read :ref:`optimizing-prefetch-limit` Tasks with ETA/countdown aren't affected by prefetch limits. +.. setting:: worker_enable_prefetch_count_reduction + +``worker_enable_prefetch_count_reduction`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +.. versionadded:: 5.4 + +Default: Enabled. + +The ``worker_enable_prefetch_count_reduction`` setting governs the restoration behavior of the +prefetch count to its maximum allowable value following a connection loss to the message +broker. By default, this setting is enabled. + +Upon a connection loss, Celery will attempt to reconnect to the broker automatically, +provided the :setting:`broker_connection_retry_on_startup` or :setting:`broker_connection_retry` +is not set to False. During the period of lost connection, the message broker does not keep track +of the number of tasks already fetched. Therefore, to manage the task load effectively and prevent +overloading, Celery reduces the prefetch count based on the number of tasks that are +currently running. + +The prefetch count is the number of messages that a worker will fetch from the broker at +a time. The reduced prefetch count helps ensure that tasks are not fetched excessively +during periods of reconnection. + +With ``worker_enable_prefetch_count_reduction`` set to its default value (Enabled), the prefetch +count will be gradually restored to its maximum allowed value each time a task that was +running before the connection was lost is completed. This behavior helps maintain a +balanced distribution of tasks among the workers while managing the load effectively. 
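As an illustrative sketch of the setting itself (the app name is an assumption, not from the patch):

.. code-block:: python

    from celery import Celery

    app = Celery('proj')
    # Keep the prefetch count fixed across broker reconnections.
    app.conf.worker_enable_prefetch_count_reduction = False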
+ +To disable the reduction and restoration of the prefetch count to its maximum allowed value on +reconnection, set ``worker_enable_prefetch_count_reduction`` to False. Disabling this setting might +be useful in scenarios where a fixed prefetch count is desired to control the rate of task +processing or manage the worker load, especially in environments with fluctuating connectivity. + +The ``worker_enable_prefetch_count_reduction`` setting provides a way to control the +restoration behavior of the prefetch count following a connection loss, aiding in +maintaining a balanced task distribution and effective load management across the workers. + .. setting:: worker_lost_wait ``worker_lost_wait`` diff --git a/docs/userguide/workers.rst b/docs/userguide/workers.rst index ede6a9881d0..cf82c522157 100644 --- a/docs/userguide/workers.rst +++ b/docs/userguide/workers.rst @@ -160,6 +160,9 @@ tasks that are currently running multiplied by :setting:`worker_prefetch_multipl The prefetch count will be gradually restored to the maximum allowed after each time a task that was running before the connection was lost is complete. +This feature is enabled by default, but can be disabled by setting False +to :setting:`worker_enable_prefetch_count_reduction`. + .. _worker-process-signals: Process Signals diff --git a/t/unit/worker/test_consumer.py b/t/unit/worker/test_consumer.py index 4a292767136..6613bd2a40e 100644 --- a/t/unit/worker/test_consumer.py +++ b/t/unit/worker/test_consumer.py @@ -92,16 +92,21 @@ def test_update_prefetch_count(self): assert c.initial_prefetch_count == 10 * 10 @pytest.mark.parametrize( - 'active_requests_count,expected_initial,expected_maximum', + 'active_requests_count,expected_initial,expected_maximum,enabled', [ - [0, 2, True], - [1, 1, False], - [2, 1, False] + [0, 2, True, True], + [1, 1, False, True], + [2, 1, False, True], + [0, 2, True, False], + [1, 2, True, False], + [2, 2, True, False], ] ) @patch('celery.worker.consumer.consumer.active_requests', new_callable=set) def test_restore_prefetch_count_on_restart(self, active_requests_mock, active_requests_count, - expected_initial, expected_maximum, subtests): + expected_initial, expected_maximum, enabled, subtests): + self.app.conf.worker_enable_prefetch_count_reduction = enabled + reqs = {Mock() for _ in range(active_requests_count)} active_requests_mock.update(reqs) @@ -128,6 +133,23 @@ def bp_start(*_, **__): with subtests.test("maximum prefetch is reached"): assert c._maximum_prefetch_restored is expected_maximum + def test_restore_prefetch_count_after_connection_restart_negative(self): + self.app.conf.worker_enable_prefetch_count_reduction = False + + c = self.get_consumer() + c.qos = Mock() + + # Overcome TypeError: 'Mock' object does not support the context manager protocol + class MutexMock: + def __enter__(self): + pass + + def __exit__(self, *args): + pass + c.qos._mutex = MutexMock() + + assert c._restore_prefetch_count_after_connection_restart(None) is None + def test_create_task_handler(self, subtests): c = self.get_consumer() c.qos = MagicMock() From 07b71b18e38424f22dfbfa6e12302f6539d12a01 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 23 Nov 2023 13:16:22 +0200 Subject: [PATCH 0562/1051] Added "Serverless" section to Redis doc (redis.rst) (#8640) * Added "Serverless" section to Redis doc (redis.rst) * README.rst :: Sponsors --- README.rst | 10 ++++++-- .../backends-and-brokers/redis.rst | 24 +++++++++++++++++++ 2 files changed, 32 insertions(+), 2 deletions(-) diff --git a/README.rst b/README.rst 
index e0c8ab4abfd..7a2b2411f37 100644 --- a/README.rst +++ b/README.rst @@ -490,11 +490,17 @@ link to your website. [`Become a sponsor`_] .. _`Become a sponsor`: https://opencollective.com/celery#sponsor -|oc-sponsors| +|oc-sponsor-1| |oc-sponsor-2| -.. |oc-sponsors| image:: https://opencollective.com/celery/sponsor/0/avatar.svg +.. |oc-sponsor-1| image:: https://opencollective.com/celery/sponsor/0/avatar.svg :target: https://opencollective.com/celery/sponsor/0/website +.. |oc-sponsor-2| image:: https://upstash.com/logo/upstash-dark-bg.svg + :target: http://upstash.com/?code=celery + :alt: Upstash + :width: 200 + :height: 57 + .. _license: License diff --git a/docs/getting-started/backends-and-brokers/redis.rst b/docs/getting-started/backends-and-brokers/redis.rst index 1924cb5dba2..1b8e688d5be 100644 --- a/docs/getting-started/backends-and-brokers/redis.rst +++ b/docs/getting-started/backends-and-brokers/redis.rst @@ -136,6 +136,30 @@ To configure the connection timeouts for the Redis result backend, use the ``ret See :func:`~kombu.utils.functional.retry_over_time` for the possible retry policy options. +.. _redis-serverless: + +Serverless +========== + +Celery supports utilizing a remote serverless Redis, which can significantly +reduce the operational overhead and cost, making it a favorable choice in +microservice architectures or environments where minimizing operational +expenses is crucial. Serverless Redis provides the necessary functionalities +without the need for manual setup, configuration, and management, thus +aligning well with the principles of automation and scalability that Celery promotes. + +Upstash +------- + +`Upstash `_ offers a serverless Redis database service, +providing a seamless solution for Celery users looking to leverage +serverless architectures. Upstash's serverless Redis service is designed +with an eventual consistency model and durable storage, facilitated +through a multi-tier storage architecture. + +Integration with Celery is straightforward as demonstrated +in an `example provided by Upstash `_. + .. _redis-caveats: Caveats From fb8b3caeafc15a6d00f9f06be336a3ca10c6ebdb Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 23 Nov 2023 13:51:34 +0200 Subject: [PATCH 0563/1051] Upstash's Celery example repo link fix due to deprecation of the previous example from the last 24h (#8665) --- docs/getting-started/backends-and-brokers/redis.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/getting-started/backends-and-brokers/redis.rst b/docs/getting-started/backends-and-brokers/redis.rst index 1b8e688d5be..088da6bafd2 100644 --- a/docs/getting-started/backends-and-brokers/redis.rst +++ b/docs/getting-started/backends-and-brokers/redis.rst @@ -158,7 +158,7 @@ with an eventual consistency model and durable storage, facilitated through a multi-tier storage architecture. Integration with Celery is straightforward as demonstrated -in an `example provided by Upstash `_. +in an `example provided by Upstash `_. .. 
_redis-caveats: From a481234d3a530350ba3d04be641aa43654727abc Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 27 Nov 2023 18:52:01 +0200 Subject: [PATCH 0564/1051] [pre-commit.ci] pre-commit autoupdate (#8676) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/mirrors-mypy: v1.7.0 → v1.7.1](https://github.com/pre-commit/mirrors-mypy/compare/v1.7.0...v1.7.1) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 829cf0258b0..4b266a6f017 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -30,7 +30,7 @@ repos: - id: isort - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.7.0 + rev: v1.7.1 hooks: - id: mypy pass_filenames: false From 7ebae1ae080489bbe12c24c4a5eea561ff29a310 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Tue, 28 Nov 2023 18:36:10 +0600 Subject: [PATCH 0565/1051] Update mypy version (#8679) --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index 90c9f2fdbfb..be7af014b73 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -7,7 +7,7 @@ pytest-order==1.2.0 boto3>=1.26.143 moto>=4.1.11 # typing extensions -mypy==1.7.0; platform_python_implementation=="CPython" +mypy==1.7.1; platform_python_implementation=="CPython" pre-commit==3.5.0 -r extras/yaml.txt -r extras/msgpack.txt From 5e30bac4e7c388b5760f63e4af49e56be61f2f9c Mon Sep 17 00:00:00 2001 From: Dan Yishai Date: Sat, 2 Dec 2023 08:27:20 +0200 Subject: [PATCH 0566/1051] Update cryptography dependency (#8690) For CVE-2023-49083 https://nvd.nist.gov/vuln/detail/CVE-2023-49083 --- requirements/extras/auth.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/auth.txt b/requirements/extras/auth.txt index 485821aff14..ab817dd3527 100644 --- a/requirements/extras/auth.txt +++ b/requirements/extras/auth.txt @@ -1 +1 @@ -cryptography==41.0.5 +cryptography==41.0.7 From a935635e5beb85668350b24afa40053eaeba3bb9 Mon Sep 17 00:00:00 2001 From: lyzlisa <34400837+lyzlisa@users.noreply.github.com> Date: Tue, 5 Dec 2023 15:58:08 -0500 Subject: [PATCH 0567/1051] Add type annotations to `celery/utils/nodenames.py` (#8667) * Add type annotations to `celery/utils/nodenames.py` * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * **extra: Any -> **extra: dict * Update celery/utils/nodenames.py Co-authored-by: Viicos <65306057+Viicos@users.noreply.github.com> * Remove import of `Any` type --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Tomer Nosrati Co-authored-by: Asif Saif Uddin Co-authored-by: Viicos <65306057+Viicos@users.noreply.github.com> --- celery/utils/nodenames.py | 50 ++++++++++++++++++++++++--------------- 1 file changed, 31 insertions(+), 19 deletions(-) diff --git a/celery/utils/nodenames.py b/celery/utils/nodenames.py index b3d1a522f09..91509a467ab 100644 --- a/celery/utils/nodenames.py +++ b/celery/utils/nodenames.py @@ -1,4 +1,6 @@ """Worker name utilities.""" +from __future__ import annotations + import os import socket from functools import partial @@ -22,13 +24,18 @@ gethostname = memoize(1, Cache=dict)(socket.gethostname) 
 __all__ = (
-    'worker_direct', 'gethostname', 'nodename',
-    'anon_nodename', 'nodesplit', 'default_nodename',
-    'node_format', 'host_format',
+    'worker_direct',
+    'gethostname',
+    'nodename',
+    'anon_nodename',
+    'nodesplit',
+    'default_nodename',
+    'node_format',
+    'host_format',
 )


-def worker_direct(hostname):
+def worker_direct(hostname: str | Queue) -> Queue:
     """Return the :class:`kombu.Queue` being a direct route to a worker.

     Arguments:
@@ -46,21 +53,20 @@
     )


-def nodename(name, hostname):
+def nodename(name: str, hostname: str) -> str:
     """Create node name from name/hostname pair."""
     return NODENAME_SEP.join((name, hostname))


-def anon_nodename(hostname=None, prefix='gen'):
+def anon_nodename(hostname: str | None = None, prefix: str = 'gen') -> str:
     """Return the nodename for this process (not a worker).

     This is used for e.g. the origin task message field.
     """
-    return nodename(''.join([prefix, str(os.getpid())]),
-                    hostname or gethostname())
+    return nodename(''.join([prefix, str(os.getpid())]), hostname or gethostname())


-def nodesplit(name):
+def nodesplit(name: str) -> tuple[None, str] | list[str]:
     """Split node name into tuple of name/hostname."""
     parts = name.split(NODENAME_SEP, 1)
     if len(parts) == 1:
@@ -68,21 +74,21 @@
     return parts


-def default_nodename(hostname):
+def default_nodename(hostname: str) -> str:
     """Return the default nodename for this process."""
     name, host = nodesplit(hostname or '')
     return nodename(name or NODENAME_DEFAULT, host or gethostname())


-def node_format(s, name, **extra):
+def node_format(s: str, name: str, **extra: dict) -> str:
     """Format worker node name (name@host.com)."""
     shortname, host = nodesplit(name)
-    return host_format(
-        s, host, shortname or NODENAME_DEFAULT, p=name, **extra)
+    return host_format(s, host, shortname or NODENAME_DEFAULT, p=name, **extra)


-def _fmt_process_index(prefix='', default='0'):
+def _fmt_process_index(prefix: str = '', default: str = '0') -> str:
     from .log import current_process_index
+
     index = current_process_index()
     return f'{prefix}{index}' if index else default

@@ -90,13 +96,19 @@
 _fmt_process_index_with_prefix = partial(_fmt_process_index, '-', '')


-def host_format(s, host=None, name=None, **extra):
+def host_format(s: str, host: str | None = None, name: str | None = None, **extra: dict) -> str:
     """Format host %x abbreviations."""
     host = host or gethostname()
     hname, _, domain = host.partition('.')
     name = name or hname
-    keys = dict({
-        'h': host, 'n': name, 'd': domain,
-        'i': _fmt_process_index, 'I': _fmt_process_index_with_prefix,
-    }, **extra)
+    keys = dict(
+        {
+            'h': host,
+            'n': name,
+            'd': domain,
+            'i': _fmt_process_index,
+            'I': _fmt_process_index_with_prefix,
+        },
+        **extra,
+    )
     return simple_format(s, keys)

From 3617ee8efc97e43276a9308b1f3d5b6943a4aac2 Mon Sep 17 00:00:00 2001
From: Samuel GIFFARD
Date: Tue, 5 Dec 2023 21:59:47 +0100
Subject: [PATCH 0568/1051] Issue 3426. Adding myself to the contributors. (#8696)

This is a long-overdue edit. It comes from contribution 938407f. The
commits that initially made this CONTRIBUTORS change were 9146290 and
b7bbeeb. They were part of PR #3433, but this line never made it to the
cherry-pick.
--- CONTRIBUTORS.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 4b48c1f9b1f..d63caa5ca65 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -220,6 +220,7 @@ Adriano Martins de Jesus, 2016/06/22 Kevin Richardson, 2016/06/29 Andrew Stewart, 2016/07/04 Xin Li, 2016/08/03 +Samuel Giffard, 2016/09/08 Alli Witheford, 2016/09/29 Alan Justino da Silva, 2016/10/14 Marat Sharafutdinov, 2016/11/04 From 17631f7eda712b688294ecb8fa53e4769fe2b1f9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 7 Dec 2023 01:59:56 +0200 Subject: [PATCH 0569/1051] Bump actions/setup-python from 4 to 5 (#8701) Bumps [actions/setup-python](https://github.com/actions/setup-python) from 4 to 5. - [Release notes](https://github.com/actions/setup-python/releases) - [Commits](https://github.com/actions/setup-python/compare/v4...v5) --- updated-dependencies: - dependency-name: actions/setup-python dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/python-package.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 5a140428f95..41bdf04ea3d 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -48,7 +48,7 @@ jobs: sudo apt-get update && sudo apt-get install -f libcurl4-openssl-dev libssl-dev libgnutls28-dev httping expect libmemcached-dev - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} cache: 'pip' @@ -105,7 +105,7 @@ jobs: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} cache: 'pip' From 6701bd5c291c5e3ade258becb76b79fc3524b82e Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 7 Dec 2023 15:10:15 +0200 Subject: [PATCH 0570/1051] Fixed bug where chord.link_error() throws an exception on a dict type errback object (#8702) * Fixed bug where _chord.link_error() would call clone() on a dict instead of a signature * Added unit test: test_flag_allow_error_cb_on_chord_header_with_dict_callback() --- celery/canvas.py | 2 ++ t/unit/tasks/test_canvas.py | 8 ++++++++ 2 files changed, 10 insertions(+) diff --git a/celery/canvas.py b/celery/canvas.py index a4007f0a27f..a32d3eea7e7 100644 --- a/celery/canvas.py +++ b/celery/canvas.py @@ -2271,6 +2271,8 @@ def link_error(self, errback): ``False`` (the current default), then the error callback will only be applied to the body. 
""" + errback = maybe_signature(errback) + if self.app.conf.task_allow_error_cb_on_chord_header: for task in maybe_list(self.tasks) or []: task.link_error(errback.clone(immutable=True)) diff --git a/t/unit/tasks/test_canvas.py b/t/unit/tasks/test_canvas.py index 2c3f4f12f3e..53dc52e5cbb 100644 --- a/t/unit/tasks/test_canvas.py +++ b/t/unit/tasks/test_canvas.py @@ -1688,6 +1688,14 @@ def test_flag_allow_error_cb_on_chord_header_various_header_types(self): errback = c.link_error(sig) assert errback == sig + @pytest.mark.usefixtures('depends_on_current_app') + def test_flag_allow_error_cb_on_chord_header_with_dict_callback(self): + self.app.conf.task_allow_error_cb_on_chord_header = True + c = chord(group(signature('th1'), signature('th2')), signature('tbody')) + errback_dict = dict(signature('tcb')) + errback = c.link_error(errback_dict) + assert errback == errback_dict + def test_chord__or__group_of_single_task(self): """ Test chaining a chord to a group of a single task. """ c = chord([signature('header')], signature('body')) From 7c907dc1e011c475cb9eeba8c76db9188dab3127 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 11 Dec 2023 19:43:42 +0200 Subject: [PATCH 0571/1051] [pre-commit.ci] pre-commit autoupdate (#8715) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pycqa/isort: 5.12.0 → 5.13.0](https://github.com/pycqa/isort/compare/5.12.0...5.13.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4b266a6f017..a7800429fae 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -25,7 +25,7 @@ repos: - id: mixed-line-ending - repo: https://github.com/pycqa/isort - rev: 5.12.0 + rev: 5.13.0 hooks: - id: isort From f5d19afedbf5bffa19bcea8f04da26dd37678a03 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 14 Dec 2023 11:43:38 +0200 Subject: [PATCH 0572/1051] Bump github/codeql-action from 2 to 3 (#8725) Bumps [github/codeql-action](https://github.com/github/codeql-action) from 2 to 3. - [Release notes](https://github.com/github/codeql-action/releases) - [Changelog](https://github.com/github/codeql-action/blob/main/CHANGELOG.md) - [Commits](https://github.com/github/codeql-action/compare/v2...v3) --- updated-dependencies: - dependency-name: github/codeql-action dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/codeql-analysis.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 65e0f6c8ca5..a1dcabfe893 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -41,7 +41,7 @@ jobs: # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL - uses: github/codeql-action/init@v2 + uses: github/codeql-action/init@v3 with: languages: ${{ matrix.language }} # If you wish to specify custom queries, you can do so here or in a config file. @@ -52,7 +52,7 @@ jobs: # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). 
# If this step fails, then you should remove it and run the build manually (see below) - name: Autobuild - uses: github/codeql-action/autobuild@v2 + uses: github/codeql-action/autobuild@v3 # ℹ️ Command-line programs to run using the OS shell. # 📚 https://git.io/JvXDl @@ -66,4 +66,4 @@ jobs: # make release - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v2 + uses: github/codeql-action/analyze@v3 From d1350f9f065ca8f0b5113ccc5cfa1d6dd1c46a88 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sun, 17 Dec 2023 22:38:02 +0200 Subject: [PATCH 0573/1051] Fixed multiprocessing integration tests not running on Mac (#8727) --- t/integration/test_tasks.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/t/integration/test_tasks.py b/t/integration/test_tasks.py index 5dc5c955358..223827c2784 100644 --- a/t/integration/test_tasks.py +++ b/t/integration/test_tasks.py @@ -1,6 +1,8 @@ import logging +import platform import time from datetime import datetime, timedelta +from multiprocessing import set_start_method from time import perf_counter, sleep from uuid import uuid4 @@ -29,6 +31,16 @@ def flaky(fn): return _timeout(_flaky(fn)) +def set_multiprocessing_start_method(): + """Set multiprocessing start method to 'fork' if not on Linux.""" + if platform.system() != 'Linux': + try: + set_start_method('fork') + except RuntimeError: + # The method is already set + pass + + class test_class_based_tasks: @flaky @@ -89,6 +101,8 @@ def test_basic_task(self, manager): @flaky def test_multiprocess_producer(self, manager): """Testing multiple processes calling tasks.""" + set_multiprocessing_start_method() + from multiprocessing import Pool pool = Pool(20) ret = pool.map(_producer, range(120)) @@ -97,6 +111,8 @@ def test_multiprocess_producer(self, manager): @flaky def test_multithread_producer(self, manager): """Testing multiple threads calling tasks.""" + set_multiprocessing_start_method() + from multiprocessing.pool import ThreadPool pool = ThreadPool(20) ret = pool.map(_producer, range(120)) From 20cdf5e616fe971480f2853384b9e9c2ccf28831 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 18 Dec 2023 19:22:12 +0200 Subject: [PATCH 0574/1051] Added make docker-docs (#8729) * Changed docs service port to a less common value * Added make docker-docs * Added CI workflow for building the docs * Improved error msg if make docker-docs fails * Increased timeout from 10s -> 60s * Reduced docker-docs CI workflow timeout from 60m -> 5m * Improved UI --- .github/workflows/docker.yml | 10 +++++++++- Makefile | 11 +++++++++++ docker/docker-compose.yml | 2 +- 3 files changed, 21 insertions(+), 2 deletions(-) diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 6b2c67ca5a4..bc39a2bd3b1 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -27,4 +27,12 @@ jobs: steps: - uses: actions/checkout@v4 - name: Build Docker container - run: make docker-build \ No newline at end of file + run: make docker-build + + docker-docs: + runs-on: ubuntu-latest + timeout-minutes: 5 + steps: + - uses: actions/checkout@v4 + - name: Build Documentation + run: make docker-docs diff --git a/Makefile b/Makefile index e380095c094..5342986415c 100644 --- a/Makefile +++ b/Makefile @@ -59,6 +59,7 @@ help: @echo " docker-lint - Run tox -e lint on docker container." @echo " docker-unit-tests - Run unit tests on docker container, use '-- -k ' for specific test run." @echo " docker-bash - Get a bash shell inside the container." 
+ @echo " docker-docs - Build documentation with docker." clean: clean-docs clean-pyc clean-build @@ -197,6 +198,16 @@ docker-integration-tests: docker-bash: @docker-compose -f docker/docker-compose.yml run --rm -w /home/developer/celery celery bash +.PHONY: docker-docs +docker-docs: + @docker-compose -f docker/docker-compose.yml up --build -d docs + @echo "Waiting 60 seconds for docs service to build the documentation inside the container..." + @timeout 60 sh -c 'until docker logs $$(docker-compose -f docker/docker-compose.yml ps -q docs) 2>&1 | \ + grep "build succeeded"; do sleep 1; done' || \ + (echo "Error! - run manually: docker compose -f ./docker/docker-compose.yml up --build docs"; \ + docker-compose -f docker/docker-compose.yml logs --tail=50 docs; false) + @docker-compose -f docker/docker-compose.yml down + .PHONY: catch-all %: catch-all @: diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index c37501f1dc0..221e6ddb3ef 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -46,5 +46,5 @@ services: volumes: - ../docs:/docs:z ports: - - "7000:7000" + - "7001:7000" command: /start-docs \ No newline at end of file From 04e361509e00dc07a22c09971fc835b84d47fb65 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 18 Dec 2023 19:59:45 +0200 Subject: [PATCH 0575/1051] [pre-commit.ci] pre-commit autoupdate (#8730) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pycqa/isort: 5.13.0 → 5.13.2](https://github.com/pycqa/isort/compare/5.13.0...5.13.2) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a7800429fae..10b034c957a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -25,7 +25,7 @@ repos: - id: mixed-line-ending - repo: https://github.com/pycqa/isort - rev: 5.13.0 + rev: 5.13.2 hooks: - id: isort From 7a27725cc9bd8d6e7b930a748e854f2d00379d47 Mon Sep 17 00:00:00 2001 From: Christian Clauss Date: Tue, 19 Dec 2023 20:10:25 +0100 Subject: [PATCH 0576/1051] Fix DeprecationWarning: datetime.datetime.utcnow() (#8726) > lib/python3.12/site-packages/celery/app/base.py:940: DeprecationWarning: datetime.datetime.utcnow() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.now(datetime.UTC). 
now_in_utc = to_utc(datetime.utcnow()) --- celery/app/base.py | 3 ++- celery/backends/mongodb.py | 4 ++-- celery/fixups/django.py | 4 ++-- celery/loaders/base.py | 4 ++-- celery/security/certificate.py | 2 +- celery/utils/time.py | 2 +- celery/worker/worker.py | 6 ++--- t/integration/test_canvas.py | 4 ++-- t/integration/test_inspect.py | 4 ++-- t/integration/test_security.py | 2 +- t/integration/test_tasks.py | 6 ++--- t/unit/app/test_amqp.py | 8 +++---- t/unit/app/test_app.py | 5 +++-- t/unit/app/test_beat.py | 10 ++++----- t/unit/app/test_exceptions.py | 6 ++--- t/unit/app/test_schedules.py | 14 ++++++------ t/unit/backends/test_arangodb.py | 2 +- t/unit/security/test_certificate.py | 4 ++-- t/unit/utils/test_serialization.py | 8 +++---- t/unit/utils/test_time.py | 34 ++++++++++++++--------------- t/unit/worker/test_request.py | 8 +++---- 21 files changed, 71 insertions(+), 69 deletions(-) diff --git a/celery/app/base.py b/celery/app/base.py index 75eee027bb7..78012936e5e 100644 --- a/celery/app/base.py +++ b/celery/app/base.py @@ -6,6 +6,7 @@ import warnings from collections import UserDict, defaultdict, deque from datetime import datetime +from datetime import timezone as datetime_timezone from operator import attrgetter from click.exceptions import Exit @@ -937,7 +938,7 @@ def prepare_config(self, c): def now(self): """Return the current time and date as a datetime.""" - now_in_utc = to_utc(datetime.utcnow()) + now_in_utc = to_utc(datetime.now(datetime_timezone.utc)) return now_in_utc.astimezone(self.timezone) def select_queues(self, queues=None): diff --git a/celery/backends/mongodb.py b/celery/backends/mongodb.py index c64fe380807..1789f6cf0b0 100644 --- a/celery/backends/mongodb.py +++ b/celery/backends/mongodb.py @@ -1,5 +1,5 @@ """MongoDB result store backend.""" -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from kombu.exceptions import EncodeError from kombu.utils.objects import cached_property @@ -228,7 +228,7 @@ def _save_group(self, group_id, result): meta = { '_id': group_id, 'result': self.encode([i.id for i in result]), - 'date_done': datetime.utcnow(), + 'date_done': datetime.now(timezone.utc), } self.group_collection.replace_one({'_id': group_id}, meta, upsert=True) return result diff --git a/celery/fixups/django.py b/celery/fixups/django.py index 473c3b676b4..adc26db08f8 100644 --- a/celery/fixups/django.py +++ b/celery/fixups/django.py @@ -2,7 +2,7 @@ import os import sys import warnings -from datetime import datetime +from datetime import datetime, timezone from importlib import import_module from typing import IO, TYPE_CHECKING, Any, List, Optional, cast @@ -100,7 +100,7 @@ def on_worker_init(self, **kwargs: Any) -> None: self.worker_fixup.install() def now(self, utc: bool = False) -> datetime: - return datetime.utcnow() if utc else self._now() + return datetime.now(timezone.utc) if utc else self._now() def autodiscover_tasks(self) -> List[str]: from django.apps import apps diff --git a/celery/loaders/base.py b/celery/loaders/base.py index 8ac3e5b50e9..01e84254710 100644 --- a/celery/loaders/base.py +++ b/celery/loaders/base.py @@ -3,7 +3,7 @@ import os import re import sys -from datetime import datetime +from datetime import datetime, timezone from kombu.utils import json from kombu.utils.objects import cached_property @@ -62,7 +62,7 @@ def __init__(self, app, **kwargs): def now(self, utc=True): if utc: - return datetime.utcnow() + return datetime.now(timezone.utc) return datetime.now() def on_task_init(self, task_id, 
task): diff --git a/celery/security/certificate.py b/celery/security/certificate.py index 80398b39f6d..2691904d432 100644 --- a/celery/security/certificate.py +++ b/celery/security/certificate.py @@ -43,7 +43,7 @@ def __init__(self, cert: str) -> None: def has_expired(self) -> bool: """Check if the certificate has expired.""" - return datetime.datetime.utcnow() >= self._cert.not_valid_after + return datetime.datetime.now(datetime.timezone.utc) >= self._cert.not_valid_after def get_pubkey(self) -> ( DSAPublicKey | EllipticCurvePublicKey | Ed448PublicKey | Ed25519PublicKey | RSAPublicKey diff --git a/celery/utils/time.py b/celery/utils/time.py index ba94d7951b1..c8fd0959336 100644 --- a/celery/utils/time.py +++ b/celery/utils/time.py @@ -217,7 +217,7 @@ def remaining( Returns: ~datetime.timedelta: Remaining time. """ - now = now or datetime.utcnow() + now = now or datetime.now(datetime_timezone.utc) if str( start.tzinfo) == str( now.tzinfo) and now.utcoffset() != start.utcoffset(): diff --git a/celery/worker/worker.py b/celery/worker/worker.py index 04f8c30e10d..28609d9d8c5 100644 --- a/celery/worker/worker.py +++ b/celery/worker/worker.py @@ -14,7 +14,7 @@ import os import sys -from datetime import datetime +from datetime import datetime, timezone from billiard import cpu_count from kombu.utils.compat import detect_environment @@ -89,7 +89,7 @@ class Blueprint(bootsteps.Blueprint): def __init__(self, app=None, hostname=None, **kwargs): self.app = app or self.app self.hostname = default_nodename(hostname) - self.startup_time = datetime.utcnow() + self.startup_time = datetime.now(timezone.utc) self.app.loader.init_worker() self.on_before_init(**kwargs) self.setup_defaults(**kwargs) @@ -293,7 +293,7 @@ def _maybe_reload_module(self, module, force_reload=False, reloader=None): return reload_from_cwd(sys.modules[module], reloader) def info(self): - uptime = datetime.utcnow() - self.startup_time + uptime = datetime.now(timezone.utc) - self.startup_time return {'total': self.state.total_count, 'pid': os.getpid(), 'clock': str(self.app.clock), diff --git a/t/integration/test_canvas.py b/t/integration/test_canvas.py index 5673c5e60c2..b5f88016f82 100644 --- a/t/integration/test_canvas.py +++ b/t/integration/test_canvas.py @@ -2,7 +2,7 @@ import re import tempfile import uuid -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from time import monotonic, sleep import pytest @@ -366,7 +366,7 @@ def test_chain_error_handler_with_eta(self, manager): except NotImplementedError as e: raise pytest.skip(e.args[0]) - eta = datetime.utcnow() + timedelta(seconds=10) + eta = datetime.now(timezone.utc) + timedelta(seconds=10) c = chain( group( add.s(1, 2), diff --git a/t/integration/test_inspect.py b/t/integration/test_inspect.py index 501cf178d36..c6c4b2af814 100644 --- a/t/integration/test_inspect.py +++ b/t/integration/test_inspect.py @@ -1,6 +1,6 @@ import os import re -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from time import sleep from unittest.mock import ANY @@ -126,7 +126,7 @@ def test_active(self, inspect): @flaky def test_scheduled(self, inspect): """Tests listing scheduled tasks""" - exec_time = datetime.utcnow() + timedelta(seconds=5) + exec_time = datetime.now(timezone.utc) + timedelta(seconds=5) res = add.apply_async([1, 2], {'z': 3}, eta=exec_time) ret = inspect.scheduled() assert len(ret) == 1 diff --git a/t/integration/test_security.py b/t/integration/test_security.py index a6ec3e4a552..36400940439 
100644 --- a/t/integration/test_security.py +++ b/t/integration/test_security.py @@ -74,7 +74,7 @@ def gen_private_key(self): def gen_certificate(self, key, common_name, issuer=None, sign_key=None): """generate a certificate with cryptography""" - now = datetime.datetime.utcnow() + now = datetime.datetime.now(datetime.timezone.utc) certificate = x509.CertificateBuilder().subject_name( x509.Name([ diff --git a/t/integration/test_tasks.py b/t/integration/test_tasks.py index 223827c2784..10a41f407e0 100644 --- a/t/integration/test_tasks.py +++ b/t/integration/test_tasks.py @@ -1,7 +1,7 @@ import logging import platform import time -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from multiprocessing import set_start_method from time import perf_counter, sleep from uuid import uuid4 @@ -154,7 +154,7 @@ def test_expired(self, manager): for _ in range(4): sleeping.delay(2) # Execute task with expiration at now + 1 sec - result = add.apply_async((1, 1), expires=datetime.utcnow() + timedelta(seconds=1)) + result = add.apply_async((1, 1), expires=datetime.now(timezone.utc) + timedelta(seconds=1)) with pytest.raises(celery.exceptions.TaskRevokedError): result.get() assert result.status == 'REVOKED' @@ -180,7 +180,7 @@ def test_eta(self, manager): start = perf_counter() # Schedule task to be executed at time now + 3 seconds - result = add.apply_async((2, 2), eta=datetime.utcnow() + timedelta(seconds=3)) + result = add.apply_async((2, 2), eta=datetime.now(timezone.utc) + timedelta(seconds=3)) sleep(1) assert result.status == 'PENDING' assert result.ready() is False diff --git a/t/unit/app/test_amqp.py b/t/unit/app/test_amqp.py index 070002d43f4..acbeecea08a 100644 --- a/t/unit/app/test_amqp.py +++ b/t/unit/app/test_amqp.py @@ -1,4 +1,4 @@ -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from unittest.mock import Mock, patch import pytest @@ -349,14 +349,14 @@ def test_raises_if_kwargs_is_not_mapping(self): self.app.amqp.as_task_v2(uuid(), 'foo', kwargs=(1, 2, 3)) def test_countdown_to_eta(self): - now = to_utc(datetime.utcnow()).astimezone(self.app.timezone) + now = to_utc(datetime.now(timezone.utc)).astimezone(self.app.timezone) m = self.app.amqp.as_task_v2( uuid(), 'foo', countdown=10, now=now, ) assert m.headers['eta'] == (now + timedelta(seconds=10)).isoformat() def test_expires_to_datetime(self): - now = to_utc(datetime.utcnow()).astimezone(self.app.timezone) + now = to_utc(datetime.now(timezone.utc)).astimezone(self.app.timezone) m = self.app.amqp.as_task_v2( uuid(), 'foo', expires=30, now=now, ) @@ -364,7 +364,7 @@ def test_expires_to_datetime(self): now + timedelta(seconds=30)).isoformat() def test_eta_to_datetime(self): - eta = datetime.utcnow() + eta = datetime.now(timezone.utc) m = self.app.amqp.as_task_v2( uuid(), 'foo', eta=eta, ) diff --git a/t/unit/app/test_app.py b/t/unit/app/test_app.py index 8f307ebbf0c..4c92f475d42 100644 --- a/t/unit/app/test_app.py +++ b/t/unit/app/test_app.py @@ -6,6 +6,7 @@ import uuid from copy import deepcopy from datetime import datetime, timedelta +from datetime import timezone as datetime_timezone from pickle import dumps, loads from unittest.mock import Mock, patch @@ -85,7 +86,7 @@ def test_now(self): tz_utc = timezone.get_timezone('UTC') tz_us_eastern = timezone.get_timezone(timezone_setting_value) - now = to_utc(datetime.utcnow()) + now = to_utc(datetime.now(datetime_timezone.utc)) app_now = self.app.now() assert app_now.tzinfo is tz_utc @@ -101,7 +102,7 @@ def 
test_now(self): assert app_now.tzinfo == tz_us_eastern - diff = to_utc(datetime.utcnow()) - localize(app_now, tz_utc) + diff = to_utc(datetime.now(datetime_timezone.utc)) - localize(app_now, tz_utc) assert diff <= timedelta(seconds=1) # Verify that timezone setting overrides enable_utc=on setting diff --git a/t/unit/app/test_beat.py b/t/unit/app/test_beat.py index 082aeb3a5ef..6b113df426e 100644 --- a/t/unit/app/test_beat.py +++ b/t/unit/app/test_beat.py @@ -1,6 +1,6 @@ import errno import sys -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from pickle import dumps, loads from unittest.mock import Mock, call, patch @@ -863,17 +863,17 @@ class test_schedule: def test_maybe_make_aware(self): x = schedule(10, app=self.app) x.utc_enabled = True - d = x.maybe_make_aware(datetime.utcnow()) + d = x.maybe_make_aware(datetime.now(timezone.utc)) assert d.tzinfo x.utc_enabled = False - d2 = x.maybe_make_aware(datetime.utcnow()) + d2 = x.maybe_make_aware(datetime.now(timezone.utc)) assert d2.tzinfo def test_to_local(self): x = schedule(10, app=self.app) x.utc_enabled = True - d = x.to_local(datetime.utcnow()) + d = x.to_local(datetime.utcnow()) # datetime.utcnow() is deprecated in Python 3.12 assert d.tzinfo is None x.utc_enabled = False - d = x.to_local(datetime.utcnow()) + d = x.to_local(datetime.now(timezone.utc)) assert d.tzinfo diff --git a/t/unit/app/test_exceptions.py b/t/unit/app/test_exceptions.py index b881be4c028..4013c22b0da 100644 --- a/t/unit/app/test_exceptions.py +++ b/t/unit/app/test_exceptions.py @@ -1,5 +1,5 @@ import pickle -from datetime import datetime +from datetime import datetime, timezone from celery.exceptions import Reject, Retry @@ -7,11 +7,11 @@ class test_Retry: def test_when_datetime(self): - x = Retry('foo', KeyError(), when=datetime.utcnow()) + x = Retry('foo', KeyError(), when=datetime.now(timezone.utc)) assert x.humanize() def test_pickleable(self): - x = Retry('foo', KeyError(), when=datetime.utcnow()) + x = Retry('foo', KeyError(), when=datetime.now(timezone.utc)) y = pickle.loads(pickle.dumps(x)) assert x.message == y.message assert repr(x.exc) == repr(y.exc) diff --git a/t/unit/app/test_schedules.py b/t/unit/app/test_schedules.py index 1f4d5fdd85a..e5a7bfb7bdd 100644 --- a/t/unit/app/test_schedules.py +++ b/t/unit/app/test_schedules.py @@ -1,7 +1,7 @@ import sys import time from contextlib import contextmanager -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from pickle import dumps, loads from unittest import TestCase from unittest.mock import Mock @@ -50,17 +50,17 @@ def test_repr(self): def test_is_due(self): self.s.remaining_estimate = Mock(name='rem') self.s.remaining_estimate.return_value = timedelta(seconds=0) - assert self.s.is_due(datetime.utcnow()).is_due + assert self.s.is_due(datetime.now(timezone.utc)).is_due def test_is_due__not_due(self): self.s.remaining_estimate = Mock(name='rem') self.s.remaining_estimate.return_value = timedelta(hours=10) - assert not self.s.is_due(datetime.utcnow()).is_due + assert not self.s.is_due(datetime.now(timezone.utc)).is_due def test_remaining_estimate(self): self.s.cal = Mock(name='cal') - self.s.cal.next_rising().datetime.return_value = datetime.utcnow() - self.s.remaining_estimate(datetime.utcnow()) + self.s.cal.next_rising().datetime.return_value = datetime.now(timezone.utc) + self.s.remaining_estimate(datetime.now(timezone.utc)) def test_coordinates(self): with pytest.raises(ValueError): @@ -82,7 +82,7 @@ def 
test_event_uses_center(self): s.method = s._methods[ev] s.is_center = s._use_center_l[ev] try: - s.remaining_estimate(datetime.utcnow()) + s.remaining_estimate(datetime.now(timezone.utc)) except TypeError: pytest.fail( f"{s.method} was called with 'use_center' which is not a " @@ -108,7 +108,7 @@ def test_pickle(self): # This is needed for test_crontab_parser because datetime.utcnow doesn't pickle # in python 2 def utcnow(): - return datetime.utcnow() + return datetime.now(timezone.utc) class test_crontab_parser: diff --git a/t/unit/backends/test_arangodb.py b/t/unit/backends/test_arangodb.py index 8e86f09b67c..dd1232e0d77 100644 --- a/t/unit/backends/test_arangodb.py +++ b/t/unit/backends/test_arangodb.py @@ -210,7 +210,7 @@ def test_backend_cleanup(self): self.backend.cleanup() self.backend.db.AQLQuery.assert_not_called() - now = datetime.datetime.utcnow() + now = datetime.datetime.now(datetime.timezone.utc) self.backend.app.now = Mock(return_value=now) self.backend.expires = 86400 expected_checkpoint = (now - self.backend.expires_delta).isoformat() diff --git a/t/unit/security/test_certificate.py b/t/unit/security/test_certificate.py index 241527f82df..68b05fa03ee 100644 --- a/t/unit/security/test_certificate.py +++ b/t/unit/security/test_certificate.py @@ -40,7 +40,7 @@ def test_has_expired_mock(self): x = Certificate(CERT1) x._cert = Mock(name='cert') - time_after = datetime.datetime.utcnow() + datetime.timedelta(days=-1) + time_after = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(days=-1) x._cert.not_valid_after = time_after assert x.has_expired() is True @@ -49,7 +49,7 @@ def test_has_not_expired_mock(self): x = Certificate(CERT1) x._cert = Mock(name='cert') - time_after = datetime.datetime.utcnow() + datetime.timedelta(days=1) + time_after = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(days=1) x._cert.not_valid_after = time_after assert x.has_expired() is False diff --git a/t/unit/utils/test_serialization.py b/t/unit/utils/test_serialization.py index 9e762d5e8af..5ae68e4f89b 100644 --- a/t/unit/utils/test_serialization.py +++ b/t/unit/utils/test_serialization.py @@ -1,7 +1,7 @@ import json import pickle import sys -from datetime import date, datetime, time, timedelta +from datetime import date, datetime, time, timedelta, timezone from unittest.mock import Mock import pytest @@ -67,9 +67,9 @@ class test_jsonify: Queue('foo'), ['foo', 'bar', 'baz'], {'foo': 'bar'}, - datetime.utcnow(), - datetime.utcnow().replace(tzinfo=ZoneInfo("UTC")), - datetime.utcnow().replace(microsecond=0), + datetime.now(timezone.utc), + datetime.now(timezone.utc).replace(tzinfo=ZoneInfo("UTC")), + datetime.now(timezone.utc).replace(microsecond=0), date(2012, 1, 1), time(hour=1, minute=30), time(hour=1, minute=30, microsecond=3), diff --git a/t/unit/utils/test_time.py b/t/unit/utils/test_time.py index 80d5db973a1..6b955e096e9 100644 --- a/t/unit/utils/test_time.py +++ b/t/unit/utils/test_time.py @@ -48,7 +48,7 @@ def test_daylight(self, patching): class test_iso8601: def test_parse_with_timezone(self): - d = datetime.utcnow().replace(tzinfo=ZoneInfo("UTC")) + d = datetime.now(_timezone.utc).replace(tzinfo=ZoneInfo("UTC")) assert parse_iso8601(d.isoformat()) == d # 2013-06-07T20:12:51.775877+00:00 iso = d.isoformat() @@ -124,7 +124,7 @@ def test_maybe_timedelta(arg, expected): def test_remaining(): # Relative - remaining(datetime.utcnow(), timedelta(hours=1), relative=True) + remaining(datetime.now(_timezone.utc), timedelta(hours=1), relative=True) """ The upcoming 
cases check whether the next run is calculated correctly @@ -188,38 +188,38 @@ def test_tz_or_local(self): assert timezone.tz_or_local(timezone.utc) def test_to_local(self): - assert timezone.to_local(make_aware(datetime.utcnow(), timezone.utc)) - assert timezone.to_local(datetime.utcnow()) + assert timezone.to_local(make_aware(datetime.now(_timezone.utc), timezone.utc)) + assert timezone.to_local(datetime.now(_timezone.utc)) def test_to_local_fallback(self): assert timezone.to_local_fallback( - make_aware(datetime.utcnow(), timezone.utc)) - assert timezone.to_local_fallback(datetime.utcnow()) + make_aware(datetime.now(_timezone.utc), timezone.utc)) + assert timezone.to_local_fallback(datetime.now(_timezone.utc)) class test_make_aware: def test_standard_tz(self): tz = tzinfo() - wtz = make_aware(datetime.utcnow(), tz) + wtz = make_aware(datetime.now(_timezone.utc), tz) assert wtz.tzinfo == tz def test_tz_when_zoneinfo(self): tz = ZoneInfo('US/Eastern') - wtz = make_aware(datetime.utcnow(), tz) + wtz = make_aware(datetime.now(_timezone.utc), tz) assert wtz.tzinfo == tz def test_maybe_make_aware(self): - aware = datetime.utcnow().replace(tzinfo=timezone.utc) + aware = datetime.now(_timezone.utc).replace(tzinfo=timezone.utc) assert maybe_make_aware(aware) - naive = datetime.utcnow() + naive = datetime.utcnow() # datetime.utcnow() is deprecated in Python 3.12 assert maybe_make_aware(naive) assert maybe_make_aware(naive).tzinfo is ZoneInfo("UTC") tz = ZoneInfo('US/Eastern') - eastern = datetime.utcnow().replace(tzinfo=tz) + eastern = datetime.now(_timezone.utc).replace(tzinfo=tz) assert maybe_make_aware(eastern).tzinfo is tz - utcnow = datetime.utcnow() + utcnow = datetime.utcnow() # datetime.utcnow() is deprecated in Python 3.12 assert maybe_make_aware(utcnow, 'UTC').tzinfo is ZoneInfo("UTC") @@ -232,17 +232,17 @@ def utcoffset(self, dt): return None # Mock no utcoffset specified tz = tzz() - assert localize(make_aware(datetime.utcnow(), tz), tz) + assert localize(make_aware(datetime.now(_timezone.utc), tz), tz) @patch('dateutil.tz.datetime_ambiguous') def test_when_zoneinfo(self, datetime_ambiguous_mock): datetime_ambiguous_mock.return_value = False tz = ZoneInfo("US/Eastern") - assert localize(make_aware(datetime.utcnow(), tz), tz) + assert localize(make_aware(datetime.now(_timezone.utc), tz), tz) datetime_ambiguous_mock.return_value = True tz2 = ZoneInfo("US/Eastern") - assert localize(make_aware(datetime.utcnow(), tz2), tz2) + assert localize(make_aware(datetime.now(_timezone.utc), tz2), tz2) @patch('dateutil.tz.datetime_ambiguous') def test_when_is_ambiguous(self, datetime_ambiguous_mock): @@ -256,11 +256,11 @@ def is_ambiguous(self, dt): datetime_ambiguous_mock.return_value = False tz = tzz() - assert localize(make_aware(datetime.utcnow(), tz), tz) + assert localize(make_aware(datetime.now(_timezone.utc), tz), tz) datetime_ambiguous_mock.return_value = True tz2 = tzz() - assert localize(make_aware(datetime.utcnow(), tz2), tz2) + assert localize(make_aware(datetime.now(_timezone.utc), tz2), tz2) def test_localize_changes_utc_dt(self): now_utc_time = datetime.now(tz=ZoneInfo("UTC")) diff --git a/t/unit/worker/test_request.py b/t/unit/worker/test_request.py index 342e7092b1a..44408599dc7 100644 --- a/t/unit/worker/test_request.py +++ b/t/unit/worker/test_request.py @@ -2,7 +2,7 @@ import os import signal import socket -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from time import monotonic, time from unittest.mock import Mock, patch @@ -537,7 
+537,7 @@ def test_cancel__task_reserved(self): def test_revoked_expires_expired(self): job = self.get_request(self.mytask.s(1, f='x').set( - expires=datetime.utcnow() - timedelta(days=1) + expires=datetime.now(timezone.utc) - timedelta(days=1) )) with self.assert_signal_called( task_revoked, sender=job.task, request=job._context, @@ -549,7 +549,7 @@ def test_revoked_expires_expired(self): def test_revoked_expires_not_expired(self): job = self.xRequest( - expires=datetime.utcnow() + timedelta(days=1), + expires=datetime.now(timezone.utc) + timedelta(days=1), ) job.revoked() assert job.id not in revoked @@ -558,7 +558,7 @@ def test_revoked_expires_not_expired(self): def test_revoked_expires_ignore_result(self): self.mytask.ignore_result = True job = self.xRequest( - expires=datetime.utcnow() - timedelta(days=1), + expires=datetime.now(timezone.utc) - timedelta(days=1), ) job.revoked() assert job.id in revoked From 7861fd4ebfa840a06102f7c2e95720bb84a13c63 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 25 Dec 2023 23:01:05 +0200 Subject: [PATCH 0577/1051] [pre-commit.ci] pre-commit autoupdate (#8740) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/mirrors-mypy: v1.7.1 → v1.8.0](https://github.com/pre-commit/mirrors-mypy/compare/v1.7.1...v1.8.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 10b034c957a..8e681020401 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -30,7 +30,7 @@ repos: - id: isort - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.7.1 + rev: v1.8.0 hooks: - id: mypy pass_filenames: false From 40d38a835ade91676f1ef3d1be24f9e698a76086 Mon Sep 17 00:00:00 2001 From: Viicos <65306057+Viicos@users.noreply.github.com> Date: Thu, 28 Dec 2023 11:26:45 +0100 Subject: [PATCH 0578/1051] Remove `new` adjective in docs --- docs/userguide/periodic-tasks.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/userguide/periodic-tasks.rst b/docs/userguide/periodic-tasks.rst index b55799d2fe6..1928b1f9ac3 100644 --- a/docs/userguide/periodic-tasks.rst +++ b/docs/userguide/periodic-tasks.rst @@ -50,7 +50,7 @@ schedule manually. .. admonition:: Django Users - Celery recommends and is compatible with the new ``USE_TZ`` setting introduced + Celery recommends and is compatible with the ``USE_TZ`` setting introduced in Django 1.4. 
For Django users the time zone specified in the ``TIME_ZONE`` setting From 34a951b93a43499a1d96a9ca3ab4c71ac2550150 Mon Sep 17 00:00:00 2001 From: Emile Date: Wed, 3 Jan 2024 15:30:42 +0100 Subject: [PATCH 0579/1051] add type annotation (#8747) --- celery/utils/sysinfo.py | 20 +++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/celery/utils/sysinfo.py b/celery/utils/sysinfo.py index 57425dd8173..52fc45e5474 100644 --- a/celery/utils/sysinfo.py +++ b/celery/utils/sysinfo.py @@ -1,4 +1,6 @@ """System information utilities.""" +from __future__ import annotations + import os from math import ceil @@ -9,16 +11,16 @@ if hasattr(os, 'getloadavg'): - def _load_average(): + def _load_average() -> tuple[float, ...]: return tuple(ceil(l * 1e2) / 1e2 for l in os.getloadavg()) else: # pragma: no cover # Windows doesn't have getloadavg - def _load_average(): - return (0.0, 0.0, 0.0) + def _load_average() -> tuple[float, ...]: + return 0.0, 0.0, 0.0, -def load_average(): +def load_average() -> tuple[float, ...]: """Return system load average as a triple.""" return _load_average() @@ -26,23 +28,23 @@ def load_average(): class df: """Disk information.""" - def __init__(self, path): + def __init__(self, path: str | bytes | os.PathLike) -> None: self.path = path @property - def total_blocks(self): + def total_blocks(self) -> float: return self.stat.f_blocks * self.stat.f_frsize / 1024 @property - def available(self): + def available(self) -> float: return self.stat.f_bavail * self.stat.f_frsize / 1024 @property - def capacity(self): + def capacity(self) -> int: avail = self.stat.f_bavail used = self.stat.f_blocks - self.stat.f_bfree return int(ceil(used * 100.0 / (used + avail) + 0.5)) @cached_property - def stat(self): + def stat(self) -> os.statvfs_result: return os.statvfs(os.path.abspath(self.path)) From be61f8f311b3cdc08c7957cf5b9df9a808a25686 Mon Sep 17 00:00:00 2001 From: Emile Date: Wed, 3 Jan 2024 19:19:46 +0100 Subject: [PATCH 0580/1051] add type annotation (#8750) --- celery/utils/iso8601.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/celery/utils/iso8601.py b/celery/utils/iso8601.py index 74aff491a69..33176576b7f 100644 --- a/celery/utils/iso8601.py +++ b/celery/utils/iso8601.py @@ -50,7 +50,7 @@ ) -def parse_iso8601(datestring): +def parse_iso8601(datestring: str) -> str: """Parse and convert ISO-8601 string to datetime.""" warn("parse_iso8601", "v5.3", "v6", "datetime.datetime.fromisoformat or dateutil.parser.isoparse") m = ISO8601_REGEX.match(datestring) From 12a59f821fb8c5c857bedfb4832e1d72f345e6a1 Mon Sep 17 00:00:00 2001 From: Emile Date: Thu, 4 Jan 2024 16:59:19 +0100 Subject: [PATCH 0581/1051] Change type annotation to celery/utils/iso8601.py (#8752) * add type annotation * change type annotation --- celery/utils/iso8601.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/celery/utils/iso8601.py b/celery/utils/iso8601.py index 33176576b7f..f878bec59e1 100644 --- a/celery/utils/iso8601.py +++ b/celery/utils/iso8601.py @@ -50,7 +50,7 @@ ) -def parse_iso8601(datestring: str) -> str: +def parse_iso8601(datestring: str) -> datetime: """Parse and convert ISO-8601 string to datetime.""" warn("parse_iso8601", "v5.3", "v6", "datetime.datetime.fromisoformat or dateutil.parser.isoparse") m = ISO8601_REGEX.match(datestring) From 516e332f21a630baee001e7d9f57bca8b8fd902b Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Fri, 5 Jan 2024 13:30:29 +0100 Subject: [PATCH 0582/1051] Update test deps --- requirements/test.txt | 6 +++--- 
1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/test.txt b/requirements/test.txt index be7af014b73..35991da4076 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,4 +1,4 @@ -pytest==7.4.3 +pytest==7.4.4 pytest-celery==0.0.0 pytest-subtests==0.11.0 pytest-timeout==2.2.0 @@ -7,8 +7,8 @@ pytest-order==1.2.0 boto3>=1.26.143 moto>=4.1.11 # typing extensions -mypy==1.7.1; platform_python_implementation=="CPython" -pre-commit==3.5.0 +mypy==1.8.0; platform_python_implementation=="CPython" +pre-commit==3.6.0 -r extras/yaml.txt -r extras/msgpack.txt -r extras/mongodb.txt From 950711074dda320864ebc831727df35a34933876 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Fri, 5 Jan 2024 13:34:11 +0100 Subject: [PATCH 0583/1051] Update requirements/test.txt --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index 35991da4076..ad4f6ae5c95 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -8,7 +8,7 @@ boto3>=1.26.143 moto>=4.1.11 # typing extensions mypy==1.8.0; platform_python_implementation=="CPython" -pre-commit==3.6.0 +pre-commit==3.5.0 -r extras/yaml.txt -r extras/msgpack.txt -r extras/mongodb.txt From f9573974351b7f2d3106d1d0cf349b6b27fb1ed1 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sat, 6 Jan 2024 20:10:42 +0200 Subject: [PATCH 0584/1051] Mark flaky: test_asyncresult_get_cancels_subscription() (#8757) --- t/integration/test_tasks.py | 1 + 1 file changed, 1 insertion(+) diff --git a/t/integration/test_tasks.py b/t/integration/test_tasks.py index 10a41f407e0..6ce6b509c7e 100644 --- a/t/integration/test_tasks.py +++ b/t/integration/test_tasks.py @@ -540,6 +540,7 @@ def test_asyncresult_forget_cancels_subscription(self, manager): new_channels = [channel for channel in get_active_redis_channels() if channel not in channels_before_test] assert new_channels == [] + @flaky def test_asyncresult_get_cancels_subscription(self, manager): channels_before_test = get_active_redis_channels() From 232acf9ffb768e0ea614dc6bb0150f9983b6ff85 Mon Sep 17 00:00:00 2001 From: Emile Date: Sun, 7 Jan 2024 16:04:13 +0100 Subject: [PATCH 0585/1051] change _read_as_base64 (b64encode returns bytes) (#8759) --- celery/utils/term.py | 5 ++--- t/unit/utils/test_term.py | 17 ++++++++++++++++- 2 files changed, 18 insertions(+), 4 deletions(-) diff --git a/celery/utils/term.py b/celery/utils/term.py index a2eff996333..850abffe0f7 100644 --- a/celery/utils/term.py +++ b/celery/utils/term.py @@ -1,6 +1,5 @@ """Terminals and colors.""" import base64 -import codecs import os import platform import sys @@ -166,9 +165,9 @@ def supports_images(): def _read_as_base64(path): - with codecs.open(path, mode='rb') as fh: + with open(path, mode='rb') as fh: encoded = base64.b64encode(fh.read()) - return encoded if isinstance(encoded, str) else encoded.decode('ascii') + return encoded.decode('ascii') def imgcat(path, inline=1, preserve_aspect_ratio=0, **kwargs): diff --git a/t/unit/utils/test_term.py b/t/unit/utils/test_term.py index 1a599b57d8c..2261b59f8e3 100644 --- a/t/unit/utils/test_term.py +++ b/t/unit/utils/test_term.py @@ -1,8 +1,11 @@ +from base64 import b64encode +from tempfile import NamedTemporaryFile + import pytest import t.skip from celery.utils import term -from celery.utils.term import colored, fg +from celery.utils.term import _read_as_base64, colored, fg @t.skip.if_win32 @@ -55,3 +58,15 @@ def test_more_unicode(self): c2 = colored().blue('ƒƒz') c3 = c._add(c, c2) assert c3 == 
'\x1b[1;31m\xe5foo\x1b[0m\x1b[1;34m\u0192\u0192z\x1b[0m' + + def test_read_as_base64(self): + test_data = b"The quick brown fox jumps over the lazy dog" + with NamedTemporaryFile(mode='wb') as temp_file: + temp_file.write(test_data) + temp_file.seek(0) + temp_file_path = temp_file.name + + result = _read_as_base64(temp_file_path) + expected_result = b64encode(test_data).decode('ascii') + + assert result == expected_result From e1d3df4c49abe9c8e3e5bc15e7c6ac5b1f609301 Mon Sep 17 00:00:00 2001 From: Emile Date: Sun, 7 Jan 2024 16:09:20 +0100 Subject: [PATCH 0586/1051] Replace string concatenation with fstring (#8760) --- celery/utils/term.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/celery/utils/term.py b/celery/utils/term.py index 850abffe0f7..700a80e84a5 100644 --- a/celery/utils/term.py +++ b/celery/utils/term.py @@ -56,7 +56,7 @@ def __init__(self, *s, **kwargs): } def _add(self, a, b): - return str(a) + str(b) + return f"{a}{b}" def _fold_no_color(self, a, b): try: @@ -68,7 +68,7 @@ def _fold_no_color(self, a, b): except AttributeError: B = str(b) - return ''.join((str(A), str(B))) + return f"{A}{B}" def no_color(self): if self.s: @@ -79,13 +79,13 @@ def embed(self): prefix = '' if self.enabled: prefix = self.op - return ''.join((str(prefix), str(reduce(self._add, self.s)))) + return f"{prefix}{reduce(self._add, self.s)}" def __str__(self): suffix = '' if self.enabled: suffix = RESET_SEQ - return str(''.join((self.embed(), str(suffix)))) + return f"{self.embed()}{suffix}" def node(self, s, op): return self.__class__(enabled=self.enabled, op=op, *s) @@ -157,7 +157,7 @@ def reset(self, *s): return self.node(s or [''], RESET_SEQ) def __add__(self, other): - return str(self) + str(other) + return f"{self}{other}" def supports_images(): From 9ac848f2cdfcbdcf6562accf2cb6f1eff7791dd5 Mon Sep 17 00:00:00 2001 From: Emile Date: Sun, 7 Jan 2024 18:10:14 +0100 Subject: [PATCH 0587/1051] add type annotation (#8755) --- celery/utils/term.py | 84 +++++++++++++++++++++++--------------------- 1 file changed, 44 insertions(+), 40 deletions(-) diff --git a/celery/utils/term.py b/celery/utils/term.py index 700a80e84a5..53236ad549d 100644 --- a/celery/utils/term.py +++ b/celery/utils/term.py @@ -1,4 +1,6 @@ """Terminals and colors.""" +from __future__ import annotations + import base64 import os import platform @@ -7,6 +9,8 @@ __all__ = ('colored',) +from typing import Any + BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(8) OP_SEQ = '\033[%dm' RESET_SEQ = '\033[0m' @@ -25,7 +29,7 @@ _IMG_POST = '\a\033\\' if TERM_IS_SCREEN else '\a' -def fg(s): +def fg(s: int) -> str: return COLOR_SEQ % s @@ -40,11 +44,11 @@ class colored: ... c.green('dog '))) """ - def __init__(self, *s, **kwargs): - self.s = s - self.enabled = not IS_WINDOWS and kwargs.get('enabled', True) - self.op = kwargs.get('op', '') - self.names = { + def __init__(self, *s: object, **kwargs: Any) -> None: + self.s: tuple[object, ...] 
= s + self.enabled: bool = not IS_WINDOWS and kwargs.get('enabled', True) + self.op: str = kwargs.get('op', '') + self.names: dict[str, Any] = { 'black': self.black, 'red': self.red, 'green': self.green, @@ -55,10 +59,10 @@ def __init__(self, *s, **kwargs): 'white': self.white, } - def _add(self, a, b): + def _add(self, a: object, b: object) -> str: return f"{a}{b}" - def _fold_no_color(self, a, b): + def _fold_no_color(self, a: Any, b: Any) -> str: try: A = a.no_color() except AttributeError: @@ -70,107 +74,107 @@ def _fold_no_color(self, a, b): return f"{A}{B}" - def no_color(self): + def no_color(self) -> str: if self.s: return str(reduce(self._fold_no_color, self.s)) return '' - def embed(self): + def embed(self) -> str: prefix = '' if self.enabled: prefix = self.op return f"{prefix}{reduce(self._add, self.s)}" - def __str__(self): + def __str__(self) -> str: suffix = '' if self.enabled: suffix = RESET_SEQ return f"{self.embed()}{suffix}" - def node(self, s, op): + def node(self, s: tuple[object, ...], op: str) -> colored: return self.__class__(enabled=self.enabled, op=op, *s) - def black(self, *s): + def black(self, *s: object) -> colored: return self.node(s, fg(30 + BLACK)) - def red(self, *s): + def red(self, *s: object) -> colored: return self.node(s, fg(30 + RED)) - def green(self, *s): + def green(self, *s: object) -> colored: return self.node(s, fg(30 + GREEN)) - def yellow(self, *s): + def yellow(self, *s: object) -> colored: return self.node(s, fg(30 + YELLOW)) - def blue(self, *s): + def blue(self, *s: object) -> colored: return self.node(s, fg(30 + BLUE)) - def magenta(self, *s): + def magenta(self, *s: object) -> colored: return self.node(s, fg(30 + MAGENTA)) - def cyan(self, *s): + def cyan(self, *s: object) -> colored: return self.node(s, fg(30 + CYAN)) - def white(self, *s): + def white(self, *s: object) -> colored: return self.node(s, fg(30 + WHITE)) - def __repr__(self): + def __repr__(self) -> str: return repr(self.no_color()) - def bold(self, *s): + def bold(self, *s: object) -> colored: return self.node(s, OP_SEQ % 1) - def underline(self, *s): + def underline(self, *s: object) -> colored: return self.node(s, OP_SEQ % 4) - def blink(self, *s): + def blink(self, *s: object) -> colored: return self.node(s, OP_SEQ % 5) - def reverse(self, *s): + def reverse(self, *s: object) -> colored: return self.node(s, OP_SEQ % 7) - def bright(self, *s): + def bright(self, *s: object) -> colored: return self.node(s, OP_SEQ % 8) - def ired(self, *s): + def ired(self, *s: object) -> colored: return self.node(s, fg(40 + RED)) - def igreen(self, *s): + def igreen(self, *s: object) -> colored: return self.node(s, fg(40 + GREEN)) - def iyellow(self, *s): + def iyellow(self, *s: object) -> colored: return self.node(s, fg(40 + YELLOW)) - def iblue(self, *s): + def iblue(self, *s: colored) -> colored: return self.node(s, fg(40 + BLUE)) - def imagenta(self, *s): + def imagenta(self, *s: object) -> colored: return self.node(s, fg(40 + MAGENTA)) - def icyan(self, *s): + def icyan(self, *s: object) -> colored: return self.node(s, fg(40 + CYAN)) - def iwhite(self, *s): + def iwhite(self, *s: object) -> colored: return self.node(s, fg(40 + WHITE)) - def reset(self, *s): - return self.node(s or [''], RESET_SEQ) + def reset(self, *s: object) -> colored: + return self.node(s or ('',), RESET_SEQ) - def __add__(self, other): + def __add__(self, other: object) -> str: return f"{self}{other}" -def supports_images(): - return sys.stdin.isatty() and ITERM_PROFILE +def supports_images() -> bool: + return 
sys.stdin.isatty() and ITERM_PROFILE is not None -def _read_as_base64(path): +def _read_as_base64(path: str) -> str: with open(path, mode='rb') as fh: encoded = base64.b64encode(fh.read()) return encoded.decode('ascii') -def imgcat(path, inline=1, preserve_aspect_ratio=0, **kwargs): +def imgcat(path: str, inline: int = 1, preserve_aspect_ratio: int = 0, **kwargs: Any) -> str: return '\n%s1337;File=inline=%d;preserveAspectRatio=%d:%s%s' % ( _IMG_PRE, inline, preserve_aspect_ratio, _read_as_base64(path), _IMG_POST) From 851b897d38e7715ba64827c714aa5ec468b88bb0 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sun, 7 Jan 2024 23:49:13 +0200 Subject: [PATCH 0588/1051] Skipping test_tasks::test_task_accepted - Test fails randomly (non-deterministic) (#8761) --- t/integration/test_tasks.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/t/integration/test_tasks.py b/t/integration/test_tasks.py index 6ce6b509c7e..87587119b15 100644 --- a/t/integration/test_tasks.py +++ b/t/integration/test_tasks.py @@ -418,7 +418,8 @@ def test_fail_with_unpickleable_exception(self, manager): assert result.status == 'FAILURE' - @flaky + # Requires investigation why it randomly succeeds/fails + @pytest.mark.skip(reason="Randomly fails") def test_task_accepted(self, manager, sleep=1): r1 = sleeping.delay(sleep) sleeping.delay(sleep) From 1c8e3f998bf4927f42a48d1649fd3c64cb1f3131 Mon Sep 17 00:00:00 2001 From: robotrapta <79607467+robotrapta@users.noreply.github.com> Date: Sun, 7 Jan 2024 16:58:21 -0800 Subject: [PATCH 0589/1051] Updated concurrency docs page. (#8753) * First draft of updated concurrency docs page. * Wordsmithing a bit. * Removing link to better external documentation. --- docs/userguide/concurrency/index.rst | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/docs/userguide/concurrency/index.rst b/docs/userguide/concurrency/index.rst index 75faac8e98d..d0355fdfb80 100644 --- a/docs/userguide/concurrency/index.rst +++ b/docs/userguide/concurrency/index.rst @@ -7,8 +7,36 @@ :Release: |version| :Date: |today| +Concurrency in Celery enables the parallel execution of tasks. The default +model, `prefork`, is well-suited for many scenarios and generally recommended +for most users. In fact, switching to another mode will silently disable +certain features like `soft_timeout` and `max_tasks_per_child`. + +This page gives a quick overview of the available options which you can pick +between using the `--pool` option when starting the worker. + +Overview of Concurrency Options +------------------------------- + +- `prefork`: The default option, ideal for CPU-bound tasks and most use cases. + It is robust and recommended unless there's a specific need for another model. +- `eventlet` and `gevent`: Designed for IO-bound tasks, these models use + greenlets for high concurrency. Note that certain features, like `soft_timeout`, + are not available in these modes. These have detailed documentation pages + linked below. +- `solo`: Executes tasks sequentially in the main thread. +- `threads`: Utilizes threading for concurrency, available if the + `concurrent.futures` module is present. +- `custom`: Enables specifying a custom worker pool implementation through + environment variables. + .. toctree:: :maxdepth: 2 eventlet gevent + +.. note:: + While alternative models like `eventlet` and `gevent` are available, they + may lack certain features compared to `prefork`. We recommend `prefork` as + the starting point unless specific requirements dictate otherwise. 
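To make the trade-off described in that page concrete: the pool is selected with the `--pool` flag when starting the worker (for example `celery -A proj worker --pool=prefork`), or in the app configuration. A minimal sketch, assuming the standard `worker_pool` setting and an illustrative `proj` app:

    from celery import Celery

    app = Celery("proj", broker="amqp://localhost")

    # Equivalent to `celery -A proj worker --pool=prefork`. prefork is the
    # default and, per the page above, the recommended starting point; it also
    # keeps features such as soft_timeout and max_tasks_per_child available.
    app.conf.worker_pool = "prefork"


    @app.task
    def crunch(n: int) -> int:
        # CPU-bound work of the kind that suits prefork's process-level
        # parallelism.
        return sum(i * i for i in range(n))

For IO-bound workloads the same app could instead be started with `--pool=gevent` and a high `--concurrency` value, at the cost of the features noted above.
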
From a06707f71d45e7c06e2fcf5439651ead39bc346e Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 8 Jan 2024 16:55:07 +0200 Subject: [PATCH 0590/1051] Changed pyup -> dependabot for updating dependencies (#8764) --- .github/dependabot.yml | 4 ++++ .pyup.yml | 5 ----- 2 files changed, 4 insertions(+), 5 deletions(-) delete mode 100644 .pyup.yml diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 123014908be..47a31bc9d65 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -4,3 +4,7 @@ updates: directory: "/" schedule: interval: "daily" + - package-ecosystem: "pip" + directory: "/" + schedule: + interval: "daily" diff --git a/.pyup.yml b/.pyup.yml deleted file mode 100644 index 0218aef3410..00000000000 --- a/.pyup.yml +++ /dev/null @@ -1,5 +0,0 @@ -# autogenerated pyup.io config file -# see https://pyup.io/docs/configuration/ for all available options - -schedule: "every week" -update: all From 3b4ab9ff7c5efc70f41a6437fe570e3eb11a7088 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 Jan 2024 18:11:20 +0200 Subject: [PATCH 0591/1051] Bump isort from 5.12.0 to 5.13.2 (#8772) Bumps [isort](https://github.com/pycqa/isort) from 5.12.0 to 5.13.2. - [Release notes](https://github.com/pycqa/isort/releases) - [Changelog](https://github.com/PyCQA/isort/blob/main/CHANGELOG.md) - [Commits](https://github.com/pycqa/isort/compare/5.12.0...5.13.2) --- updated-dependencies: - dependency-name: isort dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/dev.txt b/requirements/dev.txt index 441d81a3230..fae13c00951 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -2,4 +2,4 @@ git+https://github.com/celery/py-amqp.git git+https://github.com/celery/kombu.git git+https://github.com/celery/billiard.git vine>=5.0.0 -isort==5.12.0 +isort==5.13.2 From 7d1eb9adc3d178e016eda59ec05fa51472344d69 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 Jan 2024 18:12:54 +0200 Subject: [PATCH 0592/1051] Update elasticsearch requirement from <=8.11.0 to <=8.11.1 (#8775) Updates the requirements on [elasticsearch](https://github.com/elastic/elasticsearch-py) to permit the latest version. - [Release notes](https://github.com/elastic/elasticsearch-py/releases) - [Commits](https://github.com/elastic/elasticsearch-py/compare/0.4.1...v8.11.1) --- updated-dependencies: - dependency-name: elasticsearch dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/extras/elasticsearch.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/elasticsearch.txt b/requirements/extras/elasticsearch.txt index 50764cdfb64..af927f70d11 100644 --- a/requirements/extras/elasticsearch.txt +++ b/requirements/extras/elasticsearch.txt @@ -1,2 +1,2 @@ -elasticsearch<=8.11.0 +elasticsearch<=8.11.1 elastic-transport<=8.10.0 From 45dbe1cf9c98c4f0dff08a61e1067d680f6d5339 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 Jan 2024 18:13:59 +0200 Subject: [PATCH 0593/1051] Bump sphinx-click from 4.4.0 to 5.1.0 (#8774) Bumps [sphinx-click](https://github.com/click-contrib/sphinx-click) from 4.4.0 to 5.1.0. - [Release notes](https://github.com/click-contrib/sphinx-click/releases) - [Commits](https://github.com/click-contrib/sphinx-click/compare/4.4.0...5.1.0) --- updated-dependencies: - dependency-name: sphinx-click dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/docs.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/docs.txt b/requirements/docs.txt index fac534b02cf..2596004d021 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -1,7 +1,7 @@ sphinx_celery>=2.0.0 Sphinx==5.3.0 sphinx-testing~=1.0.1 -sphinx-click==4.4.0 +sphinx-click==5.1.0 -r extras/sqlalchemy.txt -r test.txt -r deps/mock.txt From cd6738bb8663ac31dc37f033538f923250fbd266 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 Jan 2024 19:06:02 +0200 Subject: [PATCH 0594/1051] Bump python-memcached from 1.59 to 1.61 (#8776) Bumps [python-memcached](https://github.com/linsomniac/python-memcached) from 1.59 to 1.61. - [Release notes](https://github.com/linsomniac/python-memcached/releases) - [Changelog](https://github.com/linsomniac/python-memcached/blob/master/ChangeLog) - [Commits](https://github.com/linsomniac/python-memcached/compare/1.59...1.61) --- updated-dependencies: - dependency-name: python-memcached dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/extras/pymemcache.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/pymemcache.txt b/requirements/extras/pymemcache.txt index 24743088b93..6429f34b9f5 100644 --- a/requirements/extras/pymemcache.txt +++ b/requirements/extras/pymemcache.txt @@ -1 +1 @@ -python-memcached==1.59 +python-memcached==1.61 From cf9785bd4fe5d1a26163b7721fd3bf4696b1e56a Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 8 Jan 2024 20:00:28 +0200 Subject: [PATCH 0595/1051] [pre-commit.ci] pre-commit autoupdate (#8778) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/PyCQA/flake8: 6.1.0 → 7.0.0](https://github.com/PyCQA/flake8/compare/6.1.0...7.0.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 8e681020401..66653ceaa63 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -6,7 +6,7 @@ repos: args: ["--py38-plus"] - repo: https://github.com/PyCQA/flake8 - rev: 6.1.0 + rev: 7.0.0 hooks: - id: flake8 From 6a2720e4f7847fa501928754babbac62a12b3fc7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 9 Jan 2024 01:56:30 +0200 Subject: [PATCH 0596/1051] Update elastic-transport requirement from <=8.10.0 to <=8.11.0 (#8780) Updates the requirements on [elastic-transport](https://github.com/elastic/elastic-transport-python) to permit the latest version. - [Release notes](https://github.com/elastic/elastic-transport-python/releases) - [Changelog](https://github.com/elastic/elastic-transport-python/blob/main/CHANGELOG.md) - [Commits](https://github.com/elastic/elastic-transport-python/compare/0.1.0b0...v8.11.0) --- updated-dependencies: - dependency-name: elastic-transport dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/extras/elasticsearch.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/elasticsearch.txt b/requirements/extras/elasticsearch.txt index af927f70d11..696c6ce76cc 100644 --- a/requirements/extras/elasticsearch.txt +++ b/requirements/extras/elasticsearch.txt @@ -1,2 +1,2 @@ elasticsearch<=8.11.1 -elastic-transport<=8.10.0 +elastic-transport<=8.11.0 From dc49ec2a95da14ae3449491a4aa1e799b1415375 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 11 Jan 2024 20:33:58 +0200 Subject: [PATCH 0597/1051] python-memcached==1.61 -> python-memcached>=1.61 (#8787) --- requirements/extras/pymemcache.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/pymemcache.txt b/requirements/extras/pymemcache.txt index 6429f34b9f5..ffa124846aa 100644 --- a/requirements/extras/pymemcache.txt +++ b/requirements/extras/pymemcache.txt @@ -1 +1 @@ -python-memcached==1.61 +python-memcached>=1.61 From fa1d98c2a86bf6a3d7987b85253a6a2fb9b90f74 Mon Sep 17 00:00:00 2001 From: Adam Weiss Date: Sun, 14 Jan 2024 14:30:58 -0500 Subject: [PATCH 0598/1051] Remove usage of utcnow (#8791) * Remove usage of utcnow * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- celery/backends/base.py | 4 +- celery/backends/database/models.py | 8 +-- celery/backends/elasticsearch.py | 4 +- celery/utils/time.py | 2 +- t/unit/app/test_beat.py | 7 +- t/unit/backends/test_elasticsearch.py | 98 +++++++++++++-------------- t/unit/backends/test_mongodb.py | 5 +- t/unit/utils/test_time.py | 4 +- 8 files changed, 69 insertions(+), 63 deletions(-) diff --git a/celery/backends/base.py b/celery/backends/base.py index 4216c3b343e..f7d62c3dbe4 100644 --- a/celery/backends/base.py +++ b/celery/backends/base.py @@ -9,7 +9,7 @@ import time import warnings from collections import namedtuple -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from functools import partial from weakref import WeakValueDictionary @@ -460,7 +460,7 @@ def _get_result_meta(self, result, state, traceback, request, format_date=True, encode=False): if state in self.READY_STATES: - date_done = datetime.utcnow() + date_done = datetime.now(timezone.utc) if format_date: date_done = date_done.isoformat() else: diff --git a/celery/backends/database/models.py b/celery/backends/database/models.py index 1c766b51ca4..a5df8f4d341 100644 --- a/celery/backends/database/models.py +++ b/celery/backends/database/models.py @@ -1,5 +1,5 @@ """Database models used by the SQLAlchemy result store backend.""" -from datetime import datetime +from datetime import datetime, timezone import sqlalchemy as sa from sqlalchemy.types import PickleType @@ -22,8 +22,8 @@ class Task(ResultModelBase): task_id = sa.Column(sa.String(155), unique=True) status = sa.Column(sa.String(50), default=states.PENDING) result = sa.Column(PickleType, nullable=True) - date_done = sa.Column(sa.DateTime, default=datetime.utcnow, - onupdate=datetime.utcnow, nullable=True) + date_done = sa.Column(sa.DateTime, default=datetime.now(timezone.utc), + onupdate=datetime.now(timezone.utc), nullable=True) traceback = sa.Column(sa.Text, nullable=True) def __init__(self, task_id): @@ -84,7 +84,7 @@ class TaskSet(ResultModelBase): autoincrement=True, 
primary_key=True) taskset_id = sa.Column(sa.String(155), unique=True) result = sa.Column(PickleType, nullable=True) - date_done = sa.Column(sa.DateTime, default=datetime.utcnow, + date_done = sa.Column(sa.DateTime, default=datetime.now(timezone.utc), nullable=True) def __init__(self, taskset_id, result): diff --git a/celery/backends/elasticsearch.py b/celery/backends/elasticsearch.py index cb4ca4da0fd..a97869bef52 100644 --- a/celery/backends/elasticsearch.py +++ b/celery/backends/elasticsearch.py @@ -1,5 +1,5 @@ """Elasticsearch result store backend.""" -from datetime import datetime +from datetime import datetime, timezone from kombu.utils.encoding import bytes_to_str from kombu.utils.url import _parse_url @@ -129,7 +129,7 @@ def _set_with_state(self, key, value, state): body = { 'result': value, '@timestamp': '{}Z'.format( - datetime.utcnow().isoformat()[:-3] + datetime.now(timezone.utc).isoformat()[:-9] ), } try: diff --git a/celery/utils/time.py b/celery/utils/time.py index c8fd0959336..d27615cc10e 100644 --- a/celery/utils/time.py +++ b/celery/utils/time.py @@ -212,7 +212,7 @@ def remaining( using :func:`delta_resolution` (i.e., rounded to the resolution of `ends_in`). now (Callable): Function returning the current time and date. - Defaults to :func:`datetime.utcnow`. + Defaults to :func:`datetime.now(timezone.utc)`. Returns: ~datetime.timedelta: Remaining time. diff --git a/t/unit/app/test_beat.py b/t/unit/app/test_beat.py index 6b113df426e..fa163bb931e 100644 --- a/t/unit/app/test_beat.py +++ b/t/unit/app/test_beat.py @@ -156,7 +156,10 @@ def is_due(self, *args, **kwargs): class mocked_schedule(schedule): - def __init__(self, is_due, next_run_at, nowfun=datetime.utcnow): + def now_func(): + return datetime.now(timezone.utc) + + def __init__(self, is_due, next_run_at, nowfun=now_func): self._is_due = is_due self._next_run_at = next_run_at self.run_every = timedelta(seconds=1) @@ -872,7 +875,7 @@ def test_maybe_make_aware(self): def test_to_local(self): x = schedule(10, app=self.app) x.utc_enabled = True - d = x.to_local(datetime.utcnow()) # datetime.utcnow() is deprecated in Python 3.12 + d = x.to_local(datetime.now()) assert d.tzinfo is None x.utc_enabled = False d = x.to_local(datetime.now(timezone.utc)) diff --git a/t/unit/backends/test_elasticsearch.py b/t/unit/backends/test_elasticsearch.py index a53fe512984..a465cbcf501 100644 --- a/t/unit/backends/test_elasticsearch.py +++ b/t/unit/backends/test_elasticsearch.py @@ -1,4 +1,4 @@ -import datetime +from datetime import datetime, timezone from unittest.mock import Mock, call, patch, sentinel import pytest @@ -150,8 +150,8 @@ def test_backend_by_url(self, url='elasticsearch://localhost:9200/index'): @patch('celery.backends.elasticsearch.datetime') def test_index_conflict(self, datetime_mock): - expected_dt = datetime.datetime(2020, 6, 1, 18, 43, 24, 123456, None) - datetime_mock.utcnow.return_value = expected_dt + expected_dt = datetime(2020, 6, 1, 18, 43, 24, 123456, timezone.utc) + datetime_mock.now.return_value = expected_dt x = ElasticsearchBackend(app=self.app) x._server = Mock() @@ -178,20 +178,20 @@ def test_index_conflict(self, datetime_mock): x._server.index.assert_called_once_with( id=sentinel.task_id, index=x.index, - body={'result': sentinel.result, '@timestamp': expected_dt.isoformat()[:-3] + 'Z'}, + body={'result': sentinel.result, '@timestamp': expected_dt.isoformat()[:-9] + 'Z'}, params={'op_type': 'create'}, ) x._server.update.assert_called_once_with( id=sentinel.task_id, index=x.index, - body={'doc': {'result': 
sentinel.result, '@timestamp': expected_dt.isoformat()[:-3] + 'Z'}}, + body={'doc': {'result': sentinel.result, '@timestamp': expected_dt.isoformat()[:-9] + 'Z'}}, params={'if_seq_no': 2, 'if_primary_term': 1} ) @patch('celery.backends.elasticsearch.datetime') def test_index_conflict_with_doctype(self, datetime_mock): - expected_dt = datetime.datetime(2020, 6, 1, 18, 43, 24, 123456, None) - datetime_mock.utcnow.return_value = expected_dt + expected_dt = datetime(2020, 6, 1, 18, 43, 24, 123456, timezone.utc) + datetime_mock.now.return_value = expected_dt x = ElasticsearchBackend(app=self.app) x._server = Mock() @@ -219,21 +219,21 @@ def test_index_conflict_with_doctype(self, datetime_mock): id=sentinel.task_id, index=x.index, doc_type=x.doc_type, - body={'result': sentinel.result, '@timestamp': expected_dt.isoformat()[:-3] + 'Z'}, + body={'result': sentinel.result, '@timestamp': expected_dt.isoformat()[:-9] + 'Z'}, params={'op_type': 'create'}, ) x._server.update.assert_called_once_with( id=sentinel.task_id, index=x.index, doc_type=x.doc_type, - body={'doc': {'result': sentinel.result, '@timestamp': expected_dt.isoformat()[:-3] + 'Z'}}, + body={'doc': {'result': sentinel.result, '@timestamp': expected_dt.isoformat()[:-9] + 'Z'}}, params={'if_seq_no': 2, 'if_primary_term': 1} ) @patch('celery.backends.elasticsearch.datetime') def test_index_conflict_without_state(self, datetime_mock): - expected_dt = datetime.datetime(2020, 6, 1, 18, 43, 24, 123456, None) - datetime_mock.utcnow.return_value = expected_dt + expected_dt = datetime(2020, 6, 1, 18, 43, 24, 123456, timezone.utc) + datetime_mock.now.return_value = expected_dt x = ElasticsearchBackend(app=self.app) x._server = Mock() @@ -260,13 +260,13 @@ def test_index_conflict_without_state(self, datetime_mock): x._server.index.assert_called_once_with( id=sentinel.task_id, index=x.index, - body={'result': sentinel.result, '@timestamp': expected_dt.isoformat()[:-3] + 'Z'}, + body={'result': sentinel.result, '@timestamp': expected_dt.isoformat()[:-9] + 'Z'}, params={'op_type': 'create'}, ) x._server.update.assert_called_once_with( id=sentinel.task_id, index=x.index, - body={'doc': {'result': sentinel.result, '@timestamp': expected_dt.isoformat()[:-3] + 'Z'}}, + body={'doc': {'result': sentinel.result, '@timestamp': expected_dt.isoformat()[:-9] + 'Z'}}, params={'if_seq_no': 2, 'if_primary_term': 1} ) @@ -277,8 +277,8 @@ def test_index_conflict_with_ready_state_on_backend_without_state(self, datetime so it cannot protect overriding a ready state by any other state. As a result, server.update will be called no matter what. 
""" - expected_dt = datetime.datetime(2020, 6, 1, 18, 43, 24, 123456, None) - datetime_mock.utcnow.return_value = expected_dt + expected_dt = datetime(2020, 6, 1, 18, 43, 24, 123456, timezone.utc) + datetime_mock.now.return_value = expected_dt x = ElasticsearchBackend(app=self.app) x._server = Mock() @@ -305,20 +305,20 @@ def test_index_conflict_with_ready_state_on_backend_without_state(self, datetime x._server.index.assert_called_once_with( id=sentinel.task_id, index=x.index, - body={'result': sentinel.result, '@timestamp': expected_dt.isoformat()[:-3] + 'Z'}, + body={'result': sentinel.result, '@timestamp': expected_dt.isoformat()[:-9] + 'Z'}, params={'op_type': 'create'}, ) x._server.update.assert_called_once_with( id=sentinel.task_id, index=x.index, - body={'doc': {'result': sentinel.result, '@timestamp': expected_dt.isoformat()[:-3] + 'Z'}}, + body={'doc': {'result': sentinel.result, '@timestamp': expected_dt.isoformat()[:-9] + 'Z'}}, params={'if_seq_no': 2, 'if_primary_term': 1} ) @patch('celery.backends.elasticsearch.datetime') def test_index_conflict_with_existing_success(self, datetime_mock): - expected_dt = datetime.datetime(2020, 6, 1, 18, 43, 24, 123456, None) - datetime_mock.utcnow.return_value = expected_dt + expected_dt = datetime(2020, 6, 1, 18, 43, 24, 123456, timezone.utc) + datetime_mock.now.return_value = expected_dt x = ElasticsearchBackend(app=self.app) x._server = Mock() @@ -347,15 +347,15 @@ def test_index_conflict_with_existing_success(self, datetime_mock): x._server.index.assert_called_once_with( id=sentinel.task_id, index=x.index, - body={'result': sentinel.result, '@timestamp': expected_dt.isoformat()[:-3] + 'Z'}, + body={'result': sentinel.result, '@timestamp': expected_dt.isoformat()[:-9] + 'Z'}, params={'op_type': 'create'}, ) x._server.update.assert_not_called() @patch('celery.backends.elasticsearch.datetime') def test_index_conflict_with_existing_ready_state(self, datetime_mock): - expected_dt = datetime.datetime(2020, 6, 1, 18, 43, 24, 123456, None) - datetime_mock.utcnow.return_value = expected_dt + expected_dt = datetime(2020, 6, 1, 18, 43, 24, 123456, timezone.utc) + datetime_mock.now.return_value = expected_dt x = ElasticsearchBackend(app=self.app) x._server = Mock() @@ -382,7 +382,7 @@ def test_index_conflict_with_existing_ready_state(self, datetime_mock): x._server.index.assert_called_once_with( id=sentinel.task_id, index=x.index, - body={'result': sentinel.result, '@timestamp': expected_dt.isoformat()[:-3] + 'Z'}, + body={'result': sentinel.result, '@timestamp': expected_dt.isoformat()[:-9] + 'Z'}, params={'op_type': 'create'}, ) x._server.update.assert_not_called() @@ -390,11 +390,11 @@ def test_index_conflict_with_existing_ready_state(self, datetime_mock): @patch('celery.backends.elasticsearch.datetime') @patch('celery.backends.base.datetime') def test_backend_concurrent_update(self, base_datetime_mock, es_datetime_mock): - expected_dt = datetime.datetime(2020, 6, 1, 18, 43, 24, 123456, None) - es_datetime_mock.utcnow.return_value = expected_dt + expected_dt = datetime(2020, 6, 1, 18, 43, 24, 123456, timezone.utc) + es_datetime_mock.now.return_value = expected_dt - expected_done_dt = datetime.datetime(2020, 6, 1, 18, 45, 34, 654321, None) - base_datetime_mock.utcnow.return_value = expected_done_dt + expected_done_dt = datetime(2020, 6, 1, 18, 45, 34, 654321, timezone.utc) + base_datetime_mock.now.return_value = expected_done_dt self.app.conf.result_backend_always_retry, prev = True, self.app.conf.result_backend_always_retry 
x_server_get_side_effect = [ @@ -455,7 +455,7 @@ def test_backend_concurrent_update(self, base_datetime_mock, es_datetime_mock): index=x.index, body={ 'result': expected_result, - '@timestamp': expected_dt.isoformat()[:-3] + 'Z' + '@timestamp': expected_dt.isoformat()[:-9] + 'Z' }, params={'op_type': 'create'} ), @@ -464,7 +464,7 @@ def test_backend_concurrent_update(self, base_datetime_mock, es_datetime_mock): index=x.index, body={ 'result': expected_result, - '@timestamp': expected_dt.isoformat()[:-3] + 'Z' + '@timestamp': expected_dt.isoformat()[:-9] + 'Z' }, params={'op_type': 'create'} ), @@ -476,7 +476,7 @@ def test_backend_concurrent_update(self, base_datetime_mock, es_datetime_mock): body={ 'doc': { 'result': expected_result, - '@timestamp': expected_dt.isoformat()[:-3] + 'Z' + '@timestamp': expected_dt.isoformat()[:-9] + 'Z' } }, params={'if_seq_no': 2, 'if_primary_term': 1} @@ -487,7 +487,7 @@ def test_backend_concurrent_update(self, base_datetime_mock, es_datetime_mock): body={ 'doc': { 'result': expected_result, - '@timestamp': expected_dt.isoformat()[:-3] + 'Z' + '@timestamp': expected_dt.isoformat()[:-9] + 'Z' } }, params={'if_seq_no': 3, 'if_primary_term': 1} @@ -501,11 +501,11 @@ def test_backend_concurrent_update(self, base_datetime_mock, es_datetime_mock): @patch('celery.backends.elasticsearch.datetime') @patch('celery.backends.base.datetime') def test_backend_index_conflicting_document_removed(self, base_datetime_mock, es_datetime_mock): - expected_dt = datetime.datetime(2020, 6, 1, 18, 43, 24, 123456, None) - es_datetime_mock.utcnow.return_value = expected_dt + expected_dt = datetime(2020, 6, 1, 18, 43, 24, 123456, timezone.utc) + es_datetime_mock.now.return_value = expected_dt - expected_done_dt = datetime.datetime(2020, 6, 1, 18, 45, 34, 654321, None) - base_datetime_mock.utcnow.return_value = expected_done_dt + expected_done_dt = datetime(2020, 6, 1, 18, 45, 34, 654321, timezone.utc) + base_datetime_mock.now.return_value = expected_done_dt self.app.conf.result_backend_always_retry, prev = True, self.app.conf.result_backend_always_retry try: @@ -550,7 +550,7 @@ def test_backend_index_conflicting_document_removed(self, base_datetime_mock, es index=x.index, body={ 'result': expected_result, - '@timestamp': expected_dt.isoformat()[:-3] + 'Z' + '@timestamp': expected_dt.isoformat()[:-9] + 'Z' }, params={'op_type': 'create'} ), @@ -559,7 +559,7 @@ def test_backend_index_conflicting_document_removed(self, base_datetime_mock, es index=x.index, body={ 'result': expected_result, - '@timestamp': expected_dt.isoformat()[:-3] + 'Z' + '@timestamp': expected_dt.isoformat()[:-9] + 'Z' }, params={'op_type': 'create'} ), @@ -572,11 +572,11 @@ def test_backend_index_conflicting_document_removed(self, base_datetime_mock, es @patch('celery.backends.elasticsearch.datetime') @patch('celery.backends.base.datetime') def test_backend_index_conflicting_document_removed_not_throwing(self, base_datetime_mock, es_datetime_mock): - expected_dt = datetime.datetime(2020, 6, 1, 18, 43, 24, 123456, None) - es_datetime_mock.utcnow.return_value = expected_dt + expected_dt = datetime(2020, 6, 1, 18, 43, 24, 123456, timezone.utc) + es_datetime_mock.now.return_value = expected_dt - expected_done_dt = datetime.datetime(2020, 6, 1, 18, 45, 34, 654321, None) - base_datetime_mock.utcnow.return_value = expected_done_dt + expected_done_dt = datetime(2020, 6, 1, 18, 45, 34, 654321, timezone.utc) + base_datetime_mock.now.return_value = expected_done_dt self.app.conf.result_backend_always_retry, prev = True, 
self.app.conf.result_backend_always_retry try: @@ -618,7 +618,7 @@ def test_backend_index_conflicting_document_removed_not_throwing(self, base_date index=x.index, body={ 'result': expected_result, - '@timestamp': expected_dt.isoformat()[:-3] + 'Z' + '@timestamp': expected_dt.isoformat()[:-9] + 'Z' }, params={'op_type': 'create'} ), @@ -627,7 +627,7 @@ def test_backend_index_conflicting_document_removed_not_throwing(self, base_date index=x.index, body={ 'result': expected_result, - '@timestamp': expected_dt.isoformat()[:-3] + 'Z' + '@timestamp': expected_dt.isoformat()[:-9] + 'Z' }, params={'op_type': 'create'} ), @@ -640,11 +640,11 @@ def test_backend_index_conflicting_document_removed_not_throwing(self, base_date @patch('celery.backends.elasticsearch.datetime') @patch('celery.backends.base.datetime') def test_backend_index_corrupted_conflicting_document(self, base_datetime_mock, es_datetime_mock): - expected_dt = datetime.datetime(2020, 6, 1, 18, 43, 24, 123456, None) - es_datetime_mock.utcnow.return_value = expected_dt + expected_dt = datetime(2020, 6, 1, 18, 43, 24, 123456, timezone.utc) + es_datetime_mock.now.return_value = expected_dt - expected_done_dt = datetime.datetime(2020, 6, 1, 18, 45, 34, 654321, None) - base_datetime_mock.utcnow.return_value = expected_done_dt + expected_done_dt = datetime(2020, 6, 1, 18, 45, 34, 654321, timezone.utc) + base_datetime_mock.now.return_value = expected_done_dt # self.app.conf.result_backend_always_retry, prev = True, self.app.conf.result_backend_always_retry # try: @@ -685,7 +685,7 @@ def test_backend_index_corrupted_conflicting_document(self, base_datetime_mock, index=x.index, body={ 'result': expected_result, - '@timestamp': expected_dt.isoformat()[:-3] + 'Z' + '@timestamp': expected_dt.isoformat()[:-9] + 'Z' }, params={'op_type': 'create'} ) @@ -695,7 +695,7 @@ def test_backend_index_corrupted_conflicting_document(self, base_datetime_mock, body={ 'doc': { 'result': expected_result, - '@timestamp': expected_dt.isoformat()[:-3] + 'Z' + '@timestamp': expected_dt.isoformat()[:-9] + 'Z' } }, params={'if_primary_term': 1, 'if_seq_no': 2} diff --git a/t/unit/backends/test_mongodb.py b/t/unit/backends/test_mongodb.py index 6f74b42125f..9ae340ee149 100644 --- a/t/unit/backends/test_mongodb.py +++ b/t/unit/backends/test_mongodb.py @@ -563,7 +563,10 @@ def test_cleanup(self, mock_get_database): mock_database.__getitem__ = Mock(name='MD.__getitem__') mock_database.__getitem__.return_value = mock_collection - self.backend.app.now = datetime.datetime.utcnow + def now_func(): + return datetime.datetime.now(datetime.timezone.utc) + + self.backend.app.now = now_func self.backend.cleanup() mock_get_database.assert_called_once_with() diff --git a/t/unit/utils/test_time.py b/t/unit/utils/test_time.py index 6b955e096e9..621769252a9 100644 --- a/t/unit/utils/test_time.py +++ b/t/unit/utils/test_time.py @@ -212,14 +212,14 @@ def test_tz_when_zoneinfo(self): def test_maybe_make_aware(self): aware = datetime.now(_timezone.utc).replace(tzinfo=timezone.utc) assert maybe_make_aware(aware) - naive = datetime.utcnow() # datetime.utcnow() is deprecated in Python 3.12 + naive = datetime.now() assert maybe_make_aware(naive) assert maybe_make_aware(naive).tzinfo is ZoneInfo("UTC") tz = ZoneInfo('US/Eastern') eastern = datetime.now(_timezone.utc).replace(tzinfo=tz) assert maybe_make_aware(eastern).tzinfo is tz - utcnow = datetime.utcnow() # datetime.utcnow() is deprecated in Python 3.12 + utcnow = datetime.now() assert maybe_make_aware(utcnow, 'UTC').tzinfo is ZoneInfo("UTC") 
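
The migration pattern applied across the files above, shown in isolation as a minimal, standard-library-only sketch:

    from datetime import datetime, timezone

    # datetime.utcnow() is deprecated as of Python 3.12 and returns a *naive*
    # datetime (no tzinfo attached):
    naive = datetime.utcnow()
    assert naive.tzinfo is None

    # The replacement used throughout this commit returns an *aware* datetime:
    aware = datetime.now(timezone.utc)
    assert aware.tzinfo is timezone.utc

The two calls differ only in timezone awareness: the aware value's `isoformat()` output carries a `+00:00` offset suffix, which is why the Elasticsearch backend's slice changed from `[:-3]` to `[:-9]`. Both slices leave a millisecond-precision timestamp, to which the literal `Z` is then appended.
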
From 9ed121d3d514a084247f4e29fbe7a7aa8b2d441a Mon Sep 17 00:00:00 2001
From: Tomer Nosrati
Date: Tue, 17 Oct 2023 22:56:58 +0300
Subject: [PATCH 0599/1051] Pytest Celery Integration (#8241)

* Added initial/sanity smoke tests

* Allow using all integration test tasks in the smoke tests environment, in
  addition to smoke-test-specific tasks (so the existing test tasks are reused)

* Added xdist support to the smoke tests only

* Added a CI workflow for building the smoke test Dockerfiles

* Added a new tox env that cleans up resources and leftovers from the smoke
  tests: tox -e clean
---
 .github/workflows/docker.yml | 32 ++++++++++++
 .github/workflows/python-package.yml | 39 +++++++++++++++
 .gitignore | 1 +
 requirements/extras/pytest.txt | 3 ++
 requirements/test-tmp_for_dev.txt | 3 ++
 requirements/test.txt | 3 +-
 t/integration/conftest.py | 7 +--
 t/integration/tasks.py | 50 +++++++++++--------
 t/smoke/__init__.py | 0
 t/smoke/conftest.py | 16 ++++++
 t/smoke/signals.py | 26 ++++++++++
 t/smoke/tasks.py | 15 ++++++
 t/smoke/test_canvas.py | 73 ++++++++++++++++++++++++++++
 t/smoke/test_consumer.py | 55 +++++++++++++++++++++
 t/smoke/test_control.py | 7 +++
 t/smoke/test_failover.py | 41 ++++++++++++++++
 t/smoke/test_signals.py | 54 ++++++++++++++++++++
 t/smoke/workers/__init__.py | 0
 t/smoke/workers/dev.py | 66 +++++++++++++++++++++++++
 t/smoke/workers/docker/dev | 34 +++++++++++++
 t/smoke/workers/docker/pypi | 33 +++++++++++++
 t/smoke/workers/latest.py | 51 +++++++++++++++++++
 t/smoke/workers/legacy.py | 55 +++++++++++++++++++++
 tox.ini | 18 +++++++
 24 files changed, 654 insertions(+), 28 deletions(-)
 create mode 100644 requirements/test-tmp_for_dev.txt
 create mode 100644 t/smoke/__init__.py
 create mode 100644 t/smoke/conftest.py
 create mode 100644 t/smoke/signals.py
 create mode 100644 t/smoke/tasks.py
 create mode 100644 t/smoke/test_canvas.py
 create mode 100644 t/smoke/test_consumer.py
 create mode 100644 t/smoke/test_control.py
 create mode 100644 t/smoke/test_failover.py
 create mode 100644 t/smoke/test_signals.py
 create mode 100644 t/smoke/workers/__init__.py
 create mode 100644 t/smoke/workers/dev.py
 create mode 100644 t/smoke/workers/docker/dev
 create mode 100644 t/smoke/workers/docker/pypi
 create mode 100644 t/smoke/workers/latest.py
 create mode 100644 t/smoke/workers/legacy.py

diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml
index bc39a2bd3b1..65dd0914029 100644
--- a/.github/workflows/docker.yml
+++ b/.github/workflows/docker.yml
@@ -36,3 +36,35 @@ jobs:
     - uses: actions/checkout@v4
     - name: Build Documentation
       run: make docker-docs
+
+  smoke-tests_dev:
+    runs-on: ubuntu-latest
+    timeout-minutes: 10
+    steps:
+    - uses: actions/checkout@v4
+    - name: "Build smoke tests container: dev"
+      run: docker build -f t/smoke/workers/docker/dev .
+
+  smoke-tests_latest:
+    runs-on: ubuntu-latest
+    timeout-minutes: 10
+    steps:
+    - uses: actions/checkout@v4
+    - name: "Build smoke tests container: latest"
+      run: docker build -f t/smoke/workers/docker/pypi .
+
+  smoke-tests_pypi:
+    runs-on: ubuntu-latest
+    timeout-minutes: 10
+    steps:
+    - uses: actions/checkout@v4
+    - name: "Build smoke tests container: pypi"
+      run: docker build -f t/smoke/workers/docker/pypi --build-arg CELERY_VERSION="5" .
+
+  smoke-tests_legacy:
+    runs-on: ubuntu-latest
+    timeout-minutes: 10
+    steps:
+    - uses: actions/checkout@v4
+    - name: "Build smoke tests container: legacy"
+      run: docker build -f t/smoke/workers/docker/pypi --build-arg CELERY_VERSION="4" .
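
The jobs above build the smoke-test worker images; the suite that exercises them is added in the files that follow. For orientation, a minimal test in the same style, a sketch that reuses names introduced in this commit (the `celery_setup` fixture and `RESULT_TIMEOUT` come from the pytest-celery plugin, `noop` from t/smoke/tasks.py):

    from pytest_celery import RESULT_TIMEOUT, CeleryTestSetup

    from t.smoke.tasks import noop


    class test_smoke_sketch:
        def test_roundtrip(self, celery_setup: CeleryTestSetup):
            # celery_setup wires a containerized broker, result backend and
            # worker together; the task only needs to be routed to the
            # worker's queue.
            res = noop.s().apply_async(queue=celery_setup.worker.worker_queue)
            assert res.get(timeout=RESULT_TIMEOUT) is None

The real tests below follow this shape while also restarting brokers, asserting on worker logs, and running canvases.
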
diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 41bdf04ea3d..04c363a818c 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -119,3 +119,42 @@ jobs: run: > tox --verbose --verbose -e "${{ matrix.python-version }}-integration-${{ matrix.toxenv }}" -vv + + Smoke: + # needs: + # - Integration + # if: needs.Integration.result == 'success' + # timeout-minutes: 240 + + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python-version: ['3.8', '3.9', '3.10', '3.11'] + + steps: + - name: Fetch Docker Images + run: | + docker pull redis:latest + docker pull rabbitmq:latest + + - name: Install apt packages + run: | + sudo apt update + + - uses: actions/checkout@v3 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + cache: 'pip' + cache-dependency-path: '**/setup.py' + - name: Install tox + run: python -m pip install --upgrade pip tox tox-gh-actions + - name: > + Run tox for + "${{ matrix.python-version }}-smoke" + timeout-minutes: 30 + run: > + tox --verbose --verbose -e + "${{ matrix.python-version }}-smoke" -- --reruns 5 --reruns-delay 60 --rerun-except AssertionError -n auto diff --git a/.gitignore b/.gitignore index d892eca06e5..02c9965790a 100644 --- a/.gitignore +++ b/.gitignore @@ -37,3 +37,4 @@ integration-tests-config.json [0-9]* statefilename.* dump.rdb +.env diff --git a/requirements/extras/pytest.txt b/requirements/extras/pytest.txt index 6daa4ff1249..0d178f4a462 100644 --- a/requirements/extras/pytest.txt +++ b/requirements/extras/pytest.txt @@ -1 +1,4 @@ pytest-celery==0.0.0 +# pytest-celery==1.0.0a1 +# git+https://github.com/celery/pytest-celery.git +# git+https://github.com/Katz-Consulting-Group/pytest-celery.git@celery_integration#egg=pytest-celery \ No newline at end of file diff --git a/requirements/test-tmp_for_dev.txt b/requirements/test-tmp_for_dev.txt new file mode 100644 index 00000000000..326c2e82e07 --- /dev/null +++ b/requirements/test-tmp_for_dev.txt @@ -0,0 +1,3 @@ +# -e ../pytest-celery +git+https://github.com/celery/pytest-celery.git +# git+https://github.com/Katz-Consulting-Group/pytest-celery.git@BRANCH_NAME#egg=pytest-celery \ No newline at end of file diff --git a/requirements/test.txt b/requirements/test.txt index ad4f6ae5c95..2b26eef5e9f 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,5 +1,6 @@ pytest==7.4.4 -pytest-celery==0.0.0 +# pytest-celery==1.0.0a1 +pytest-rerunfailures==12.0 pytest-subtests==0.11.0 pytest-timeout==2.2.0 pytest-click==1.1.0 diff --git a/t/integration/conftest.py b/t/integration/conftest.py index 550bd5d37ba..1707e3ca324 100644 --- a/t/integration/conftest.py +++ b/t/integration/conftest.py @@ -8,6 +8,7 @@ # that installs the pytest plugin into the setuptools registry. 
from celery.contrib.pytest import celery_app, celery_session_worker from celery.contrib.testing.manager import Manager +from t.integration.tasks import get_redis_connection TEST_BROKER = os.environ.get('TEST_BROKER', 'pyamqp://') TEST_BACKEND = os.environ.get('TEST_BACKEND', 'redis://') @@ -17,15 +18,9 @@ 'celery_app', 'celery_session_worker', 'get_active_redis_channels', - 'get_redis_connection', ) -def get_redis_connection(): - from redis import StrictRedis - return StrictRedis(host=os.environ.get('REDIS_HOST')) - - def get_active_redis_channels(): return get_redis_connection().execute_command('PUBSUB CHANNELS') diff --git a/t/integration/tasks.py b/t/integration/tasks.py index 24dedbce29c..038b137f823 100644 --- a/t/integration/tasks.py +++ b/t/integration/tasks.py @@ -1,12 +1,18 @@ +import os from collections.abc import Iterable from time import sleep from celery import Signature, Task, chain, chord, group, shared_task -from celery.canvas import StampingVisitor, signature +from celery.canvas import signature from celery.exceptions import SoftTimeLimitExceeded from celery.utils.log import get_task_logger -from .conftest import get_redis_connection + +def get_redis_connection(): + from redis import StrictRedis + + return StrictRedis(host=os.environ.get("REDIS_HOST")) + logger = get_task_logger(__name__) @@ -455,28 +461,30 @@ def errback_new_style(request, exc, tb): return request.id -class StampOnReplace(StampingVisitor): - stamp = {'StampOnReplace': 'This is the replaced task'} +try: + from celery.canvas import StampingVisitor - def on_signature(self, sig, **headers) -> dict: - return self.stamp + class StampOnReplace(StampingVisitor): + stamp = {'StampOnReplace': 'This is the replaced task'} + def on_signature(self, sig, **headers) -> dict: + return self.stamp -class StampedTaskOnReplace(Task): - """Custom task for stamping on replace""" + class StampedTaskOnReplace(Task): + """Custom task for stamping on replace""" - def on_replace(self, sig): - sig.stamp(StampOnReplace()) - return super().on_replace(sig) - - -@shared_task -def replaced_with_me(): - return True + def on_replace(self, sig): + sig.stamp(StampOnReplace()) + return super().on_replace(sig) + @shared_task + def replaced_with_me(): + return True -@shared_task(bind=True, base=StampedTaskOnReplace) -def replace_with_stamped_task(self: StampedTaskOnReplace, replace_with=None): - if replace_with is None: - replace_with = replaced_with_me.s() - self.replace(signature(replace_with)) + @shared_task(bind=True, base=StampedTaskOnReplace) + def replace_with_stamped_task(self: StampedTaskOnReplace, replace_with=None): + if replace_with is None: + replace_with = replaced_with_me.s() + self.replace(signature(replace_with)) +except ImportError: + pass diff --git a/t/smoke/__init__.py b/t/smoke/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/t/smoke/conftest.py b/t/smoke/conftest.py new file mode 100644 index 00000000000..3b9b8e3c7ca --- /dev/null +++ b/t/smoke/conftest.py @@ -0,0 +1,16 @@ +import pytest + +from t.smoke.workers.dev import * # noqa +from t.smoke.workers.latest import * # noqa +from t.smoke.workers.legacy import * # noqa + + +@pytest.fixture +def default_worker_tasks() -> set: + from t.integration import tasks as integration_tests_tasks + from t.smoke import tasks as smoke_tests_tasks + + yield { + integration_tests_tasks, + smoke_tests_tasks, + } diff --git a/t/smoke/signals.py b/t/smoke/signals.py new file mode 100644 index 00000000000..298c12e17d3 --- /dev/null +++ b/t/smoke/signals.py @@ -0,0 
+1,26 @@ +from celery.signals import worker_init, worker_process_init, worker_process_shutdown, worker_ready, worker_shutdown + + +@worker_init.connect +def worker_init_handler(sender, **kwargs): # type: ignore + print("worker_init_handler") + + +@worker_process_init.connect +def worker_process_init_handler(sender, **kwargs): # type: ignore + print("worker_process_init_handler") + + +@worker_process_shutdown.connect +def worker_process_shutdown_handler(sender, pid, exitcode, **kwargs): # type: ignore + print("worker_process_shutdown_handler") + + +@worker_ready.connect +def worker_ready_handler(sender, **kwargs): # type: ignore + print("worker_ready_handler") + + +@worker_shutdown.connect +def worker_shutdown_handler(sender, **kwargs): # type: ignore + print("worker_shutdown_handler") diff --git a/t/smoke/tasks.py b/t/smoke/tasks.py new file mode 100644 index 00000000000..ad316d7347f --- /dev/null +++ b/t/smoke/tasks.py @@ -0,0 +1,15 @@ +from time import sleep + +import celery.utils +from celery import shared_task +from t.integration.tasks import * # noqa + + +@shared_task +def noop(*args, **kwargs) -> None: + return celery.utils.noop(*args, **kwargs) + + +@shared_task +def long_running_task(seconds: float = 1) -> None: + sleep(seconds) diff --git a/t/smoke/test_canvas.py b/t/smoke/test_canvas.py new file mode 100644 index 00000000000..965ac5e3179 --- /dev/null +++ b/t/smoke/test_canvas.py @@ -0,0 +1,73 @@ +import pytest +from pytest_celery import RESULT_TIMEOUT, CeleryTestSetup + +from celery.canvas import chain, chord, group, signature +from t.smoke.tasks import add, identity + + +class test_signature: + def test_sanity(self, celery_setup: CeleryTestSetup): + sig = signature(identity, args=("test_signature",), queue=celery_setup.worker.worker_queue) + assert sig.delay().get(timeout=RESULT_TIMEOUT) == "test_signature" + + +class test_group: + def test_sanity(self, celery_setup: CeleryTestSetup): + sig = group( + group(add.si(1, 1), add.si(2, 2)), + group([add.si(1, 1), add.si(2, 2)]), + group(s for s in [add.si(1, 1), add.si(2, 2)]), + ) + res = sig.apply_async(queue=celery_setup.worker.worker_queue) + assert res.get(timeout=RESULT_TIMEOUT) == [2, 4, 2, 4, 2, 4] + + +class test_chain: + def test_sanity(self, celery_setup: CeleryTestSetup): + queue = celery_setup.worker.worker_queue + sig = chain( + identity.si("chain_task1").set(queue=queue), + identity.si("chain_task2").set(queue=queue), + ) | identity.si("test_chain").set(queue=queue) + res = sig.apply_async() + assert res.get(timeout=RESULT_TIMEOUT) == "test_chain" + + +class test_chord: + def test_sanity(self, celery_setup: CeleryTestSetup): + if not celery_setup.chords_allowed(): + pytest.skip("Chords are not supported") + + upgraded_chord = signature( + group( + identity.si("header_task1"), + identity.si("header_task2"), + ) + | identity.si("body_task"), + queue=celery_setup.worker.worker_queue, + ) + + sig = group( + [ + upgraded_chord, + chord( + group( + identity.si("header_task3"), + identity.si("header_task4"), + ), + identity.si("body_task"), + ), + chord( + ( + sig + for sig in [ + identity.si("header_task5"), + identity.si("header_task6"), + ] + ), + identity.si("body_task"), + ), + ] + ) + res = sig.apply_async(queue=celery_setup.worker.worker_queue) + assert res.get(timeout=RESULT_TIMEOUT) == ["body_task"] * 3 diff --git a/t/smoke/test_consumer.py b/t/smoke/test_consumer.py new file mode 100644 index 00000000000..0e0f09dbf33 --- /dev/null +++ b/t/smoke/test_consumer.py @@ -0,0 +1,55 @@ +import pytest +from pytest_celery 
import CeleryTestSetup, RedisTestBroker + +from celery import Celery +from celery.canvas import group +from t.smoke.tasks import long_running_task + +WORKER_PREFETCH_MULTIPLIER = 2 +WORKER_CONCURRENCY = 5 +MAX_PREFETCH = WORKER_PREFETCH_MULTIPLIER * WORKER_CONCURRENCY + + +@pytest.fixture +def default_worker_app(default_worker_app: Celery) -> Celery: + app = default_worker_app + app.conf.worker_prefetch_multiplier = WORKER_PREFETCH_MULTIPLIER + app.conf.worker_concurrency = WORKER_CONCURRENCY + yield app + + +class test_consumer: + @pytest.mark.parametrize("expected_running_tasks_count", range(1, WORKER_CONCURRENCY + 1)) + def test_reducing_prefetch_count(self, celery_setup: CeleryTestSetup, expected_running_tasks_count: int): + sig = group(long_running_task.s(420) for _ in range(expected_running_tasks_count)) + sig.apply_async(queue=celery_setup.worker.worker_queue) + celery_setup.broker.restart() + + expected_reduced_prefetch = max( + WORKER_PREFETCH_MULTIPLIER, MAX_PREFETCH - expected_running_tasks_count * WORKER_PREFETCH_MULTIPLIER + ) + + expected_prefetch_reduce_message = ( + f"Temporarily reducing the prefetch count to {expected_reduced_prefetch} " + f"to avoid over-fetching since {expected_running_tasks_count} tasks are currently being processed." + ) + celery_setup.worker.wait_for_log(expected_prefetch_reduce_message) + + expected_prefetch_restore_message = ( + f"The prefetch count will be gradually restored to {MAX_PREFETCH} " f"as the tasks complete processing." + ) + celery_setup.worker.wait_for_log(expected_prefetch_restore_message) + + def test_prefetch_count_restored(self, celery_setup: CeleryTestSetup): + if isinstance(celery_setup.broker, RedisTestBroker): + pytest.xfail("Real bug in Redis broker") + + expected_running_tasks_count = MAX_PREFETCH+1 + sig = group(long_running_task.s(10) for _ in range(expected_running_tasks_count)) + sig.apply_async(queue=celery_setup.worker.worker_queue) + celery_setup.broker.restart() + expected_prefetch_restore_message = ( + f"Resuming normal operations following a restart.\n" + f"Prefetch count has been restored to the maximum of {MAX_PREFETCH}" + ) + celery_setup.worker.wait_for_log(expected_prefetch_restore_message) diff --git a/t/smoke/test_control.py b/t/smoke/test_control.py new file mode 100644 index 00000000000..97ed8b9fe69 --- /dev/null +++ b/t/smoke/test_control.py @@ -0,0 +1,7 @@ +from pytest_celery import CeleryTestSetup + + +class test_control: + def test_sanity(self, celery_setup: CeleryTestSetup): + r = celery_setup.app.control.ping() + assert all([all([res["ok"] == "pong" for _, res in response.items()]) for response in r]) diff --git a/t/smoke/test_failover.py b/t/smoke/test_failover.py new file mode 100644 index 00000000000..65d24ba5f63 --- /dev/null +++ b/t/smoke/test_failover.py @@ -0,0 +1,41 @@ +import pytest +from pytest_celery import (RABBITMQ_CONTAINER_TIMEOUT, RESULT_TIMEOUT, CeleryBrokerCluster, CeleryTestSetup, + RabbitMQContainer, RabbitMQTestBroker) +from pytest_docker_tools import container, fxtr + +from t.smoke.tasks import identity + +failover_broker = container( + image="{default_rabbitmq_broker_image}", + ports=fxtr("default_rabbitmq_broker_ports"), + environment=fxtr("default_rabbitmq_broker_env"), + network="{default_pytest_celery_network.name}", + wrapper_class=RabbitMQContainer, + timeout=RABBITMQ_CONTAINER_TIMEOUT, +) + + +@pytest.fixture +def failover_rabbitmq_broker(failover_broker: RabbitMQContainer) -> RabbitMQTestBroker: + broker = RabbitMQTestBroker(failover_broker) + yield broker + 
broker.teardown() + + +@pytest.fixture +def celery_broker_cluster( + celery_rabbitmq_broker: RabbitMQTestBroker, + failover_rabbitmq_broker: RabbitMQTestBroker, +) -> CeleryBrokerCluster: + cluster = CeleryBrokerCluster(celery_rabbitmq_broker, failover_rabbitmq_broker) + yield cluster + cluster.teardown() + + +class test_failover: + def test_sanity(self, celery_setup: CeleryTestSetup): + assert len(celery_setup.broker_cluster) > 1 + celery_setup.broker.kill() + expected = "test_broker_failover" + res = identity.s(expected).apply_async(queue=celery_setup.worker.worker_queue) + assert res.get(timeout=RESULT_TIMEOUT) == expected diff --git a/t/smoke/test_signals.py b/t/smoke/test_signals.py new file mode 100644 index 00000000000..c3b6210eb2b --- /dev/null +++ b/t/smoke/test_signals.py @@ -0,0 +1,54 @@ +import pytest +from pytest_celery import CeleryTestSetup + +from celery.signals import after_task_publish, before_task_publish +from t.smoke.tasks import noop + + +@pytest.fixture +def default_worker_signals(default_worker_signals: set) -> set: + from t.smoke import signals + + default_worker_signals.add(signals) + yield default_worker_signals + + +class test_signals: + @pytest.mark.parametrize( + "log, control", + [ + ("worker_init_handler", None), + ("worker_process_init_handler", None), + ("worker_ready_handler", None), + ("worker_process_shutdown_handler", "shutdown"), + ("worker_shutdown_handler", "shutdown"), + ], + ) + def test_sanity(self, celery_setup: CeleryTestSetup, log: str, control: str): + if control: + celery_setup.app.control.broadcast(control) + celery_setup.worker.wait_for_log(log) + + +class test_before_task_publish: + def test_sanity(self, celery_setup: CeleryTestSetup): + @before_task_publish.connect + def before_task_publish_handler(*args, **kwargs): + nonlocal signal_was_called + signal_was_called = True + + signal_was_called = False + noop.s().apply_async(queue=celery_setup.worker.worker_queue) + assert signal_was_called is True + + +class test_after_task_publish: + def test_sanity(self, celery_setup: CeleryTestSetup): + @after_task_publish.connect + def after_task_publish_handler(*args, **kwargs): + nonlocal signal_was_called + signal_was_called = True + + signal_was_called = False + noop.s().apply_async(queue=celery_setup.worker.worker_queue) + assert signal_was_called is True diff --git a/t/smoke/workers/__init__.py b/t/smoke/workers/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/t/smoke/workers/dev.py b/t/smoke/workers/dev.py new file mode 100644 index 00000000000..13901729240 --- /dev/null +++ b/t/smoke/workers/dev.py @@ -0,0 +1,66 @@ +import os +from typing import Any, Type + +import pytest +from pytest_celery import CeleryWorkerContainer, defaults +from pytest_docker_tools import build, container, fxtr + +import celery + + +class SmokeWorkerContainer(CeleryWorkerContainer): + @property + def client(self) -> Any: + return self + + @classmethod + def version(cls) -> str: + return celery.__version__ + + @classmethod + def log_level(cls) -> str: + return "INFO" + + @classmethod + def worker_name(cls) -> str: + return "smoke_tests_worker" + + @classmethod + def worker_queue(cls) -> str: + return "smoke_tests_queue" + + +celery_dev_worker_image = build( + path=".", + dockerfile="t/smoke/workers/docker/dev", + tag="t/smoke/worker:dev", + buildargs=SmokeWorkerContainer.buildargs(), +) + + +default_worker_container = container( + image="{celery_dev_worker_image.id}", + environment=fxtr("default_worker_env"), + 
network="{default_pytest_celery_network.name}", + volumes={ + # Volume: Worker /app + "{default_worker_volume.name}": defaults.DEFAULT_WORKER_VOLUME, + # Mount: Celery source + os.path.abspath(os.getcwd()): { + "bind": "/celery", + "mode": "rw", + }, + }, + wrapper_class=SmokeWorkerContainer, + timeout=defaults.DEFAULT_WORKER_CONTAINER_TIMEOUT, +) + + +@pytest.fixture +def default_worker_container_cls() -> Type[CeleryWorkerContainer]: + return SmokeWorkerContainer + + +@pytest.fixture(scope="session") +def default_worker_container_session_cls() -> Type[CeleryWorkerContainer]: + return SmokeWorkerContainer diff --git a/t/smoke/workers/docker/dev b/t/smoke/workers/docker/dev new file mode 100644 index 00000000000..ee1709835e3 --- /dev/null +++ b/t/smoke/workers/docker/dev @@ -0,0 +1,34 @@ +FROM python:3.11-bookworm + +# Create a user to run the worker +RUN adduser --disabled-password --gecos "" test_user + +# Install system dependencies +RUN apt-get update && apt-get install -y build-essential + +# Set arguments +ARG CELERY_LOG_LEVEL=INFO +ARG CELERY_WORKER_NAME=celery_dev_worker +ARG CELERY_WORKER_QUEUE=celery +ENV LOG_LEVEL=$CELERY_LOG_LEVEL +ENV WORKER_NAME=$CELERY_WORKER_NAME +ENV WORKER_QUEUE=$CELERY_WORKER_QUEUE + +ENV PYTHONUNBUFFERED=1 +ENV PYTHONDONTWRITEBYTECODE=1 + +# Install celery from source +WORKDIR /celery + +COPY --chown=test_user:test_user . /celery +RUN pip install --no-cache-dir --upgrade pip && \ + pip install --no-cache-dir -e /celery[redis,memcache,pymemcache] + +# The workdir must be /app +WORKDIR /app + +# Switch to the test_user +USER test_user + +# Start the celery worker +CMD celery -A app worker --loglevel=$LOG_LEVEL -n $WORKER_NAME@%h -Q $WORKER_QUEUE diff --git a/t/smoke/workers/docker/pypi b/t/smoke/workers/docker/pypi new file mode 100644 index 00000000000..85d51dadf9a --- /dev/null +++ b/t/smoke/workers/docker/pypi @@ -0,0 +1,33 @@ +FROM python:3.10-bookworm + +# Create a user to run the worker +RUN adduser --disabled-password --gecos "" test_user + +# Install system dependencies +RUN apt-get update && apt-get install -y build-essential + +# Set arguments +ARG CELERY_VERSION="" +ARG CELERY_LOG_LEVEL=INFO +ARG CELERY_WORKER_NAME=celery_tests_worker +ARG CELERY_WORKER_QUEUE=celery +ENV PIP_VERSION=$CELERY_VERSION +ENV LOG_LEVEL=$CELERY_LOG_LEVEL +ENV WORKER_NAME=$CELERY_WORKER_NAME +ENV WORKER_QUEUE=$CELERY_WORKER_QUEUE + +ENV PYTHONUNBUFFERED=1 +ENV PYTHONDONTWRITEBYTECODE=1 + +# Install Python dependencies +RUN pip install --no-cache-dir --upgrade pip \ + && pip install --no-cache-dir celery[redis,memcache,pymemcache]${CELERY_VERSION:+==$CELERY_VERSION} + +# The workdir must be /app +WORKDIR /app + +# Switch to the test_user +USER test_user + +# Start the celery worker +CMD celery -A app worker --loglevel=$LOG_LEVEL -n $WORKER_NAME@%h -Q $WORKER_QUEUE diff --git a/t/smoke/workers/latest.py b/t/smoke/workers/latest.py new file mode 100644 index 00000000000..da18ceb602e --- /dev/null +++ b/t/smoke/workers/latest.py @@ -0,0 +1,51 @@ +from typing import Any + +import pytest +from pytest_celery import CeleryTestWorker, CeleryWorkerContainer, defaults +from pytest_docker_tools import build, container, fxtr + +from celery import Celery + + +class CeleryLatestWorkerContainer(CeleryWorkerContainer): + @property + def client(self) -> Any: + return self + + @classmethod + def log_level(cls) -> str: + return "INFO" + + @classmethod + def worker_name(cls) -> str: + return "celery_latest_tests_worker" + + @classmethod + def worker_queue(cls) -> str: + return 
"celery_latest_tests_queue" + + +celery_latest_worker_image = build( + path=".", + dockerfile="t/smoke/workers/docker/pypi", + tag="t/smoke/worker:latest", + buildargs=CeleryLatestWorkerContainer.buildargs(), +) + + +celery_latest_worker_container = container( + image="{celery_latest_worker_image.id}", + environment=fxtr("default_worker_env"), + network="{default_pytest_celery_network.name}", + volumes={"{default_worker_volume.name}": defaults.DEFAULT_WORKER_VOLUME}, + wrapper_class=CeleryLatestWorkerContainer, + timeout=defaults.DEFAULT_WORKER_CONTAINER_TIMEOUT, +) + + +@pytest.fixture +def celery_latest_worker( + celery_latest_worker_container: CeleryLatestWorkerContainer, + celery_setup_app: Celery, +) -> CeleryTestWorker: + yield CeleryTestWorker(celery_latest_worker_container, app=celery_setup_app) diff --git a/t/smoke/workers/legacy.py b/t/smoke/workers/legacy.py new file mode 100644 index 00000000000..0fb1f419bb6 --- /dev/null +++ b/t/smoke/workers/legacy.py @@ -0,0 +1,55 @@ +from typing import Any + +import pytest +from pytest_celery import CeleryTestWorker, CeleryWorkerContainer, defaults +from pytest_docker_tools import build, container, fxtr + +from celery import Celery + + +class CeleryLegacyWorkerContainer(CeleryWorkerContainer): + @property + def client(self) -> Any: + return self + + @classmethod + def version(cls) -> str: + return "4.4.7" # Last version of 4.x + + @classmethod + def log_level(cls) -> str: + return "INFO" + + @classmethod + def worker_name(cls) -> str: + return "celery4_tests_worker" + + @classmethod + def worker_queue(cls) -> str: + return "celery4_tests_queue" + + +celery_legacy_worker_image = build( + path=".", + dockerfile="t/smoke/workers/docker/pypi", + tag="t/smoke/worker:legacy", + buildargs=CeleryLegacyWorkerContainer.buildargs(), +) + + +celery_legacy_worker_container = container( + image="{celery_legacy_worker_image.id}", + environment=fxtr("default_worker_env"), + network="{default_pytest_celery_network.name}", + volumes={"{default_worker_volume.name}": defaults.DEFAULT_WORKER_VOLUME}, + wrapper_class=CeleryLegacyWorkerContainer, + timeout=defaults.DEFAULT_WORKER_CONTAINER_TIMEOUT, +) + + +@pytest.fixture +def celery_legacy_worker( + celery_legacy_worker_container: CeleryLegacyWorkerContainer, + celery_setup_app: Celery, +) -> CeleryTestWorker: + yield CeleryTestWorker(celery_legacy_worker_container, app=celery_setup_app) diff --git a/tox.ini b/tox.ini index 806b3d977ee..cc5087b3e03 100644 --- a/tox.ini +++ b/tox.ini @@ -4,6 +4,7 @@ requires = envlist = {3.8,3.9,3.10,3.11,3.12,pypy3}-unit {3.8,3.9,3.10,3.11,3.12,pypy3}-integration-{rabbitmq_redis,rabbitmq,redis,dynamodb,azureblockblob,cache,cassandra,elasticsearch,docker} + {3.8,3.9,3.10,3.11,3.12,pypy3}-smoke flake8 apicheck @@ -28,6 +29,7 @@ passenv = deps= -r{toxinidir}/requirements/test.txt + -r{toxinidir}/requirements/test-tmp_for_dev.txt -r{toxinidir}/requirements/pkgutils.txt 3.8,3.9,3.10,3.11,3.12: -r{toxinidir}/requirements/test-ci-default.txt @@ -35,6 +37,7 @@ deps= pypy3: -r{toxinidir}/requirements/test-ci-default.txt integration: -r{toxinidir}/requirements/test-integration.txt + smoke: pytest-xdist==3.3.1 linkcheck,apicheck,configcheck: -r{toxinidir}/requirements/docs.txt lint: pre-commit @@ -43,11 +46,14 @@ deps= commands = unit: pytest --maxfail=10 --capture=no -v --cov=celery --cov-report=xml --cov-report term {posargs} integration: pytest -xsv t/integration {posargs} + smoke: pytest -xsv t/smoke {posargs} setenv = 
PIP_EXTRA_INDEX_URL=https://celery.github.io/celery-wheelhouse/repo/simple/ BOTO_CONFIG = /dev/null WORKER_LOGLEVEL = INFO PYTHONIOENCODING = UTF-8 + PYTHONUNBUFFERED = 1 + PYTHONDONTWRITEBYTECODE = 1 cache: TEST_BROKER=redis:// cache: TEST_BACKEND=cache+pylibmc:// @@ -113,3 +119,15 @@ commands = [testenv:lint] commands = pre-commit {posargs:run --all-files --show-diff-on-failure} + +[testenv:clean] +allowlist_externals = bash +commands_pre = + pip install cleanpy +commands = + python -m cleanpy . + bash -c 'files=$(find . -name "*.coverage*" -type f); if [ -n "$files" ]; then echo "Removed coverage file(s):"; echo "$files" | tr " " "\n"; rm $files; fi' + bash -c 'containers=$(docker ps -aq --filter label=creator=pytest-docker-tools); if [ -n "$containers" ]; then echo "Removed Docker container(s):"; docker rm -f $containers; fi' + bash -c 'networks=$(docker network ls --filter name=pytest- -q); if [ -n "$networks" ]; then echo "Removed Docker network(s):"; docker network rm $networks; fi' + bash -c 'volumes=$(docker volume ls --filter name=pytest- -q); if [ -n "$volumes" ]; then echo "Removed Docker volume(s):"; docker volume rm $volumes; fi' + From af898ac41fe1b2491f93ad0e4258dfe06f2d3f2a Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Fri, 20 Oct 2023 20:48:09 +0300 Subject: [PATCH 0600/1051] Bugfix in test_prefetch_count_restored() and other enhancements (#8580) * Fixed bug in test: test_prefetch_count_restored() * Changed all smoke tests workers log level from INFO to DEBUG * Changed usage of wait_for_log() -> assert_log_exists() --- t/smoke/test_consumer.py | 15 ++++++--------- t/smoke/workers/dev.py | 2 +- t/smoke/workers/latest.py | 2 +- t/smoke/workers/legacy.py | 2 +- 4 files changed, 9 insertions(+), 12 deletions(-) diff --git a/t/smoke/test_consumer.py b/t/smoke/test_consumer.py index 0e0f09dbf33..168711bc101 100644 --- a/t/smoke/test_consumer.py +++ b/t/smoke/test_consumer.py @@ -1,5 +1,5 @@ import pytest -from pytest_celery import CeleryTestSetup, RedisTestBroker +from pytest_celery import CeleryTestSetup from celery import Celery from celery.canvas import group @@ -33,18 +33,15 @@ def test_reducing_prefetch_count(self, celery_setup: CeleryTestSetup, expected_r f"Temporarily reducing the prefetch count to {expected_reduced_prefetch} " f"to avoid over-fetching since {expected_running_tasks_count} tasks are currently being processed." ) - celery_setup.worker.wait_for_log(expected_prefetch_reduce_message) + celery_setup.worker.assert_log_exists(expected_prefetch_reduce_message) expected_prefetch_restore_message = ( - f"The prefetch count will be gradually restored to {MAX_PREFETCH} " f"as the tasks complete processing." + f"The prefetch count will be gradually restored to {MAX_PREFETCH} as the tasks complete processing." 
) - celery_setup.worker.wait_for_log(expected_prefetch_restore_message) + celery_setup.worker.assert_log_exists(expected_prefetch_restore_message) def test_prefetch_count_restored(self, celery_setup: CeleryTestSetup): - if isinstance(celery_setup.broker, RedisTestBroker): - pytest.xfail("Real bug in Redis broker") - - expected_running_tasks_count = MAX_PREFETCH+1 + expected_running_tasks_count = MAX_PREFETCH * WORKER_PREFETCH_MULTIPLIER sig = group(long_running_task.s(10) for _ in range(expected_running_tasks_count)) sig.apply_async(queue=celery_setup.worker.worker_queue) celery_setup.broker.restart() @@ -52,4 +49,4 @@ def test_prefetch_count_restored(self, celery_setup: CeleryTestSetup): f"Resuming normal operations following a restart.\n" f"Prefetch count has been restored to the maximum of {MAX_PREFETCH}" ) - celery_setup.worker.wait_for_log(expected_prefetch_restore_message) + celery_setup.worker.assert_log_exists(expected_prefetch_restore_message) diff --git a/t/smoke/workers/dev.py b/t/smoke/workers/dev.py index 13901729240..14afe4435af 100644 --- a/t/smoke/workers/dev.py +++ b/t/smoke/workers/dev.py @@ -19,7 +19,7 @@ def version(cls) -> str: @classmethod def log_level(cls) -> str: - return "INFO" + return "DEBUG" @classmethod def worker_name(cls) -> str: diff --git a/t/smoke/workers/latest.py b/t/smoke/workers/latest.py index da18ceb602e..46ced3f34cd 100644 --- a/t/smoke/workers/latest.py +++ b/t/smoke/workers/latest.py @@ -14,7 +14,7 @@ def client(self) -> Any: @classmethod def log_level(cls) -> str: - return "INFO" + return "DEBUG" @classmethod def worker_name(cls) -> str: diff --git a/t/smoke/workers/legacy.py b/t/smoke/workers/legacy.py index 0fb1f419bb6..9aefc89bcd2 100644 --- a/t/smoke/workers/legacy.py +++ b/t/smoke/workers/legacy.py @@ -18,7 +18,7 @@ def version(cls) -> str: @classmethod def log_level(cls) -> str: - return "INFO" + return "DEBUG" @classmethod def worker_name(cls) -> str: From dd9699556aee4ecbb8e6659d9e28a0741ab9433f Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 1 Nov 2023 01:10:47 +0200 Subject: [PATCH 0601/1051] * Added t/smoke/test_tasks.py (#8599) * Added auto-session redis:latest container to smoke tests --- t/integration/tasks.py | 13 ++++++++----- t/smoke/conftest.py | 22 ++++++++++++++++++++++ t/smoke/tasks.py | 14 ++++++++++++-- t/smoke/test_tasks.py | 29 +++++++++++++++++++++++++++++ 4 files changed, 71 insertions(+), 7 deletions(-) create mode 100644 t/smoke/test_tasks.py diff --git a/t/integration/tasks.py b/t/integration/tasks.py index 038b137f823..b863c0739c7 100644 --- a/t/integration/tasks.py +++ b/t/integration/tasks.py @@ -11,7 +11,9 @@ def get_redis_connection(): from redis import StrictRedis - return StrictRedis(host=os.environ.get("REDIS_HOST")) + host = os.environ.get("REDIS_HOST", "localhost") + port = os.environ.get("REDIS_PORT", 6379) + return StrictRedis(host=host, port=port) logger = get_task_logger(__name__) @@ -461,6 +463,11 @@ def errback_new_style(request, exc, tb): return request.id +@shared_task +def replaced_with_me(): + return True + + try: from celery.canvas import StampingVisitor @@ -477,10 +484,6 @@ def on_replace(self, sig): sig.stamp(StampOnReplace()) return super().on_replace(sig) - @shared_task - def replaced_with_me(): - return True - @shared_task(bind=True, base=StampedTaskOnReplace) def replace_with_stamped_task(self: StampedTaskOnReplace, replace_with=None): if replace_with is None: diff --git a/t/smoke/conftest.py b/t/smoke/conftest.py index 3b9b8e3c7ca..14954053654 100644 --- a/t/smoke/conftest.py +++ 
b/t/smoke/conftest.py @@ -1,4 +1,8 @@ +import os + import pytest +from pytest_celery import REDIS_CONTAINER_TIMEOUT, REDIS_ENV, REDIS_IMAGE, REDIS_PORTS, RedisContainer +from pytest_docker_tools import container, fetch, network from t.smoke.workers.dev import * # noqa from t.smoke.workers.latest import * # noqa @@ -14,3 +18,21 @@ def default_worker_tasks() -> set: integration_tests_tasks, smoke_tests_tasks, } + + +redis_image = fetch(repository=REDIS_IMAGE) +redis_test_container_network = network(scope="session") +redis_test_container: RedisContainer = container( + image="{redis_image.id}", + scope="session", + ports=REDIS_PORTS, + environment=REDIS_ENV, + network="{redis_test_container_network.name}", + wrapper_class=RedisContainer, + timeout=REDIS_CONTAINER_TIMEOUT, +) + + +@pytest.fixture(scope="session", autouse=True) +def set_redis_test_container(redis_test_container: RedisContainer): + os.environ["REDIS_PORT"] = str(redis_test_container.port) diff --git a/t/smoke/tasks.py b/t/smoke/tasks.py index ad316d7347f..edeb9a33b70 100644 --- a/t/smoke/tasks.py +++ b/t/smoke/tasks.py @@ -1,8 +1,10 @@ from time import sleep import celery.utils -from celery import shared_task +from celery import Task, shared_task, signature +from celery.canvas import Signature from t.integration.tasks import * # noqa +from t.integration.tasks import replaced_with_me @shared_task @@ -11,5 +13,13 @@ def noop(*args, **kwargs) -> None: @shared_task -def long_running_task(seconds: float = 1) -> None: +def long_running_task(seconds: float = 1) -> bool: sleep(seconds) + return True + + +@shared_task(bind=True) +def replace_with_task(self: Task, replace_with: Signature = None): + if replace_with is None: + replace_with = replaced_with_me.s() + self.replace(signature(replace_with)) diff --git a/t/smoke/test_tasks.py b/t/smoke/test_tasks.py new file mode 100644 index 00000000000..289a537da9b --- /dev/null +++ b/t/smoke/test_tasks.py @@ -0,0 +1,29 @@ +import pytest +from pytest_celery import RESULT_TIMEOUT, CeleryTestSetup, CeleryTestWorker, CeleryWorkerCluster + +from celery import signature +from t.integration.tasks import add, identity +from t.smoke.tasks import replace_with_task + + +class test_replace: + @pytest.fixture + def celery_worker_cluster( + self, + celery_worker: CeleryTestWorker, + celery_latest_worker: CeleryTestWorker, + ) -> CeleryWorkerCluster: + cluster = CeleryWorkerCluster(celery_worker, celery_latest_worker) + yield cluster + cluster.teardown() + + def test_sanity(self, celery_setup: CeleryTestSetup): + queues = [w.worker_queue for w in celery_setup.worker_cluster] + assert len(queues) == 2 + assert queues[0] != queues[1] + replace_with = signature(identity, args=(40,), queue=queues[1]) + sig1 = replace_with_task.s(replace_with) + sig2 = add.s(2).set(queue=queues[1]) + c = sig1 | sig2 + r = c.apply_async(queue=queues[0]) + assert r.get(timeout=RESULT_TIMEOUT) == 42 From 99690613c4c1744890b34611ea5052c896412799 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 23 Nov 2023 12:58:33 +0200 Subject: [PATCH 0602/1051] Hotfix + New smoke tests (#8664) * Changed smoke tests workers log level to INFO * Hotfix in t/smoke/tasks.py * Fixed missing teardown() call in latest & legacy workers in the smoke tests * Prefetch count smoke tests * Added t/smoke/test_control.py::test_shutdown_exit_with_zero() * Trigger CI tests on PR to smoke_tests branch. To be removed before merge to main! 
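For reference, a minimal sketch (not part of this patch) of the prefetch
arithmetic the new consumer tests assert against. The two constants mirror
those set in t/smoke/test_consumer.py below; the MAX_PREFETCH derivation is
an assumption based on Celery's usual initial prefetch count
(concurrency * prefetch multiplier):

    WORKER_PREFETCH_MULTIPLIER = 2
    WORKER_CONCURRENCY = 5
    # Assumed derivation; MAX_PREFETCH is defined outside this diff.
    MAX_PREFETCH = WORKER_CONCURRENCY * WORKER_PREFETCH_MULTIPLIER  # 10

While tasks are in flight the consumer temporarily reduces the prefetch
count, then gradually restores it to MAX_PREFETCH as the tasks complete;
the tests verify both transitions by matching the worker's log output.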
--- .github/workflows/python-package.yml | 2 +- t/smoke/tasks.py | 2 +- t/smoke/test_consumer.py | 54 ++++++++++++++++++++++++++-- t/smoke/test_control.py | 6 ++++ t/smoke/workers/dev.py | 2 +- t/smoke/workers/latest.py | 6 ++-- t/smoke/workers/legacy.py | 6 ++-- 7 files changed, 68 insertions(+), 10 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 04c363a818c..88945263ab0 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -12,7 +12,7 @@ on: - '.github/workflows/python-package.yml' - '**.toml' pull_request: - branches: [ 'main'] + branches: [ 'main', 'smoke_tests' ] paths: - '**.py' - '**.txt' diff --git a/t/smoke/tasks.py b/t/smoke/tasks.py index edeb9a33b70..99ef9eb4751 100644 --- a/t/smoke/tasks.py +++ b/t/smoke/tasks.py @@ -22,4 +22,4 @@ def long_running_task(seconds: float = 1) -> bool: def replace_with_task(self: Task, replace_with: Signature = None): if replace_with is None: replace_with = replaced_with_me.s() - self.replace(signature(replace_with)) + return self.replace(signature(replace_with)) diff --git a/t/smoke/test_consumer.py b/t/smoke/test_consumer.py index 168711bc101..04da3a1cdc7 100644 --- a/t/smoke/test_consumer.py +++ b/t/smoke/test_consumer.py @@ -1,9 +1,9 @@ import pytest -from pytest_celery import CeleryTestSetup +from pytest_celery import RESULT_TIMEOUT, CeleryTestSetup, RedisTestBroker from celery import Celery from celery.canvas import group -from t.smoke.tasks import long_running_task +from t.smoke.tasks import long_running_task, noop WORKER_PREFETCH_MULTIPLIER = 2 WORKER_CONCURRENCY = 5 @@ -18,7 +18,13 @@ def default_worker_app(default_worker_app: Celery) -> Celery: yield app -class test_consumer: +class test_worker_enable_prefetch_count_reduction_true: + @pytest.fixture + def default_worker_app(self, default_worker_app: Celery) -> Celery: + app = default_worker_app + app.conf.worker_enable_prefetch_count_reduction = True + yield app + @pytest.mark.parametrize("expected_running_tasks_count", range(1, WORKER_CONCURRENCY + 1)) def test_reducing_prefetch_count(self, celery_setup: CeleryTestSetup, expected_running_tasks_count: int): sig = group(long_running_task.s(420) for _ in range(expected_running_tasks_count)) @@ -50,3 +56,45 @@ def test_prefetch_count_restored(self, celery_setup: CeleryTestSetup): f"Prefetch count has been restored to the maximum of {MAX_PREFETCH}" ) celery_setup.worker.assert_log_exists(expected_prefetch_restore_message) + + class test_cancel_tasks_on_connection_loss: + @pytest.fixture + def default_worker_app(self, default_worker_app: Celery) -> Celery: + app = default_worker_app + app.conf.worker_prefetch_multiplier = 2 + app.conf.worker_cancel_long_running_tasks_on_connection_loss = True + app.conf.task_acks_late = True + yield app + + def test_max_prefetch_passed_on_broker_restart(self, celery_setup: CeleryTestSetup): + if isinstance(celery_setup.broker, RedisTestBroker): + pytest.xfail("Real Bug: Broker does not fetch messages after restart") + + sig = group(long_running_task.s(420) for _ in range(WORKER_CONCURRENCY)) + sig.apply_async(queue=celery_setup.worker.worker_queue) + celery_setup.broker.restart() + noop.s().apply_async(queue=celery_setup.worker.worker_queue) + celery_setup.worker.assert_log_exists("Task t.smoke.tasks.noop") + + +class test_worker_enable_prefetch_count_reduction_false: + @pytest.fixture + def default_worker_app(self, default_worker_app: Celery) -> Celery: + app = default_worker_app + 
app.conf.worker_prefetch_multiplier = 1 + app.conf.worker_enable_prefetch_count_reduction = False + app.conf.worker_cancel_long_running_tasks_on_connection_loss = True + app.conf.task_acks_late = True + yield app + + def test_max_prefetch_not_passed_on_broker_restart(self, celery_setup: CeleryTestSetup): + if isinstance(celery_setup.broker, RedisTestBroker): + pytest.xfail("Real Bug: Broker does not fetch messages after restart") + + sig = group(long_running_task.s(10) for _ in range(WORKER_CONCURRENCY)) + r = sig.apply_async(queue=celery_setup.worker.worker_queue) + celery_setup.broker.restart() + noop.s().apply_async(queue=celery_setup.worker.worker_queue) + assert "Task t.smoke.tasks.noop" not in celery_setup.worker.logs() + r.get(timeout=RESULT_TIMEOUT) + assert "Task t.smoke.tasks.noop" in celery_setup.worker.logs() diff --git a/t/smoke/test_control.py b/t/smoke/test_control.py index 97ed8b9fe69..edd108b36e7 100644 --- a/t/smoke/test_control.py +++ b/t/smoke/test_control.py @@ -5,3 +5,9 @@ class test_control: def test_sanity(self, celery_setup: CeleryTestSetup): r = celery_setup.app.control.ping() assert all([all([res["ok"] == "pong" for _, res in response.items()]) for response in r]) + + def test_shutdown_exit_with_zero(self, celery_setup: CeleryTestSetup): + celery_setup.app.control.shutdown() + while celery_setup.worker.container.status != "exited": + celery_setup.worker.container.reload() + assert celery_setup.worker.container.attrs['State']['ExitCode'] == 0 diff --git a/t/smoke/workers/dev.py b/t/smoke/workers/dev.py index 14afe4435af..13901729240 100644 --- a/t/smoke/workers/dev.py +++ b/t/smoke/workers/dev.py @@ -19,7 +19,7 @@ def version(cls) -> str: @classmethod def log_level(cls) -> str: - return "DEBUG" + return "INFO" @classmethod def worker_name(cls) -> str: diff --git a/t/smoke/workers/latest.py b/t/smoke/workers/latest.py index 46ced3f34cd..c922e98e6ef 100644 --- a/t/smoke/workers/latest.py +++ b/t/smoke/workers/latest.py @@ -14,7 +14,7 @@ def client(self) -> Any: @classmethod def log_level(cls) -> str: - return "DEBUG" + return "INFO" @classmethod def worker_name(cls) -> str: @@ -48,4 +48,6 @@ def celery_latest_worker( celery_latest_worker_container: CeleryLatestWorkerContainer, celery_setup_app: Celery, ) -> CeleryTestWorker: - yield CeleryTestWorker(celery_latest_worker_container, app=celery_setup_app) + worker = CeleryTestWorker(celery_latest_worker_container, app=celery_setup_app) + yield worker + worker.teardown() diff --git a/t/smoke/workers/legacy.py b/t/smoke/workers/legacy.py index 9aefc89bcd2..42a3952d575 100644 --- a/t/smoke/workers/legacy.py +++ b/t/smoke/workers/legacy.py @@ -18,7 +18,7 @@ def version(cls) -> str: @classmethod def log_level(cls) -> str: - return "DEBUG" + return "INFO" @classmethod def worker_name(cls) -> str: @@ -52,4 +52,6 @@ def celery_legacy_worker( celery_legacy_worker_container: CeleryLegacyWorkerContainer, celery_setup_app: Celery, ) -> CeleryTestWorker: - yield CeleryTestWorker(celery_legacy_worker_container, app=celery_setup_app) + worker = CeleryTestWorker(celery_legacy_worker_container, app=celery_setup_app) + yield worker + worker.teardown() From 200520c6e9304764c325a7ae8b6099af0d17084f Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 29 Nov 2023 02:16:13 +0200 Subject: [PATCH 0603/1051] Canvas Stamping smoke tests (#8683) * Added t/smoke/stamping/ * Refactored tests folder structure * Added t/smoke/tests/stamping/test_stamping.py * Added test_stamping::test_sanity() * Added test_stamping::test_sanity_worker_hop() * 
Implemented stamping/signals.py::task_received_handler() * Added test_stamping.py::test_multiple_stamps_multiple_workers() * Added LEGACY_TASKS_DISABLED to t/integration/tasks.py * Removed celery_latest_worker from stamping smoke tests worker cluster * Added test_stamping.py::test_stamping_on_replace_with_legacy_worker_in_cluster() * Added test_stamping.py::class test_revoke_by_stamped_headers * Added Python 3.12 in smoke tests CI * --reruns-delay 60 -> 10 for smoke tests CI * Fixed incorrect assertion in test_revoke_by_stamped_headers::test_revoke_by_stamped_headers_after_publish() * Refactored test_stamping::test_sanity() * Added test_stamping::test_callback() * Refactored stamping tests worker clusters (better readability) * Disabled unstable test configuration in t/smoke/tests/test_consumer.py --- .github/workflows/python-package.yml | 4 +- t/integration/tasks.py | 15 +- t/smoke/tests/stamping/__init__.py | 0 t/smoke/tests/stamping/conftest.py | 17 ++ t/smoke/tests/stamping/signals.py | 12 ++ t/smoke/tests/stamping/tasks.py | 22 ++ t/smoke/tests/stamping/test_stamping.py | 261 ++++++++++++++++++++++++ t/smoke/{ => tests}/test_canvas.py | 0 t/smoke/{ => tests}/test_consumer.py | 6 + t/smoke/{ => tests}/test_control.py | 0 t/smoke/{ => tests}/test_failover.py | 0 t/smoke/{ => tests}/test_signals.py | 0 t/smoke/{ => tests}/test_tasks.py | 0 13 files changed, 329 insertions(+), 8 deletions(-) create mode 100644 t/smoke/tests/stamping/__init__.py create mode 100644 t/smoke/tests/stamping/conftest.py create mode 100644 t/smoke/tests/stamping/signals.py create mode 100644 t/smoke/tests/stamping/tasks.py create mode 100644 t/smoke/tests/stamping/test_stamping.py rename t/smoke/{ => tests}/test_canvas.py (100%) rename t/smoke/{ => tests}/test_consumer.py (94%) rename t/smoke/{ => tests}/test_control.py (100%) rename t/smoke/{ => tests}/test_failover.py (100%) rename t/smoke/{ => tests}/test_signals.py (100%) rename t/smoke/{ => tests}/test_tasks.py (100%) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 88945263ab0..7e555144da6 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -130,7 +130,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ['3.8', '3.9', '3.10', '3.11'] + python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] steps: - name: Fetch Docker Images @@ -157,4 +157,4 @@ jobs: timeout-minutes: 30 run: > tox --verbose --verbose -e - "${{ matrix.python-version }}-smoke" -- --reruns 5 --reruns-delay 60 --rerun-except AssertionError -n auto + "${{ matrix.python-version }}-smoke" -- --reruns 5 --reruns-delay 10 --rerun-except AssertionError -n auto diff --git a/t/integration/tasks.py b/t/integration/tasks.py index b863c0739c7..f09492f3fd5 100644 --- a/t/integration/tasks.py +++ b/t/integration/tasks.py @@ -7,6 +7,13 @@ from celery.exceptions import SoftTimeLimitExceeded from celery.utils.log import get_task_logger +LEGACY_TASKS_DISABLED = True +try: + # Imports that are not available in Celery 4 + from celery.canvas import StampingVisitor +except ImportError: + LEGACY_TASKS_DISABLED = False + def get_redis_connection(): from redis import StrictRedis @@ -468,11 +475,9 @@ def replaced_with_me(): return True -try: - from celery.canvas import StampingVisitor - +if LEGACY_TASKS_DISABLED: class StampOnReplace(StampingVisitor): - stamp = {'StampOnReplace': 'This is the replaced task'} + stamp = {"StampOnReplace": "This is the replaced task"} def on_signature(self, sig, **headers) -> dict: return 
self.stamp @@ -489,5 +494,3 @@ def replace_with_stamped_task(self: StampedTaskOnReplace, replace_with=None): if replace_with is None: replace_with = replaced_with_me.s() self.replace(signature(replace_with)) -except ImportError: - pass diff --git a/t/smoke/tests/stamping/__init__.py b/t/smoke/tests/stamping/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/t/smoke/tests/stamping/conftest.py b/t/smoke/tests/stamping/conftest.py new file mode 100644 index 00000000000..0838a7a6ca0 --- /dev/null +++ b/t/smoke/tests/stamping/conftest.py @@ -0,0 +1,17 @@ +import pytest + + +@pytest.fixture +def default_worker_tasks(default_worker_tasks: set) -> set: + from t.smoke.tests.stamping import tasks as stamping_tasks + + default_worker_tasks.add(stamping_tasks) + yield default_worker_tasks + + +@pytest.fixture +def default_worker_signals(default_worker_signals: set) -> set: + from t.smoke.tests.stamping import signals + + default_worker_signals.add(signals) + yield default_worker_signals diff --git a/t/smoke/tests/stamping/signals.py b/t/smoke/tests/stamping/signals.py new file mode 100644 index 00000000000..86b27d7bb91 --- /dev/null +++ b/t/smoke/tests/stamping/signals.py @@ -0,0 +1,12 @@ +import json + +from celery.signals import task_received + + +@task_received.connect +def task_received_handler(request, **kwargs): + stamps = request.request_dict.get("stamps") + stamped_headers = request.request_dict.get("stamped_headers") + stamps_dump = json.dumps(stamps, indent=4, sort_keys=True) if stamps else stamps + print(f"stamped_headers = {stamped_headers}") + print(f"stamps = {stamps_dump}") diff --git a/t/smoke/tests/stamping/tasks.py b/t/smoke/tests/stamping/tasks.py new file mode 100644 index 00000000000..1068439358c --- /dev/null +++ b/t/smoke/tests/stamping/tasks.py @@ -0,0 +1,22 @@ +from time import sleep + +from celery import shared_task +from t.integration.tasks import LEGACY_TASKS_DISABLED + + +@shared_task +def waitfor(seconds: int) -> None: + print(f"Waiting for {seconds} seconds...") + for i in range(seconds): + sleep(1) + print(f"{i+1} seconds passed") + print("Done waiting") + + +if LEGACY_TASKS_DISABLED: + from t.integration.tasks import StampedTaskOnReplace, StampOnReplace + + @shared_task(bind=True, base=StampedTaskOnReplace) + def wait_for_revoke(self: StampOnReplace, seconds: int, waitfor_worker_queue) -> None: + print(f"Replacing {self.request.id} with waitfor({seconds})") + self.replace(waitfor.s(seconds).set(queue=waitfor_worker_queue)) diff --git a/t/smoke/tests/stamping/test_stamping.py b/t/smoke/tests/stamping/test_stamping.py new file mode 100644 index 00000000000..8507f371955 --- /dev/null +++ b/t/smoke/tests/stamping/test_stamping.py @@ -0,0 +1,261 @@ +from __future__ import annotations + +import json + +import pytest +from pytest_celery import (RESULT_TIMEOUT, CeleryBackendCluster, CeleryTestSetup, CeleryTestWorker, + CeleryWorkerCluster) + +from celery.canvas import Signature, StampingVisitor, chain +from celery.result import AsyncResult +from t.integration.tasks import StampOnReplace, add, identity, replace_with_stamped_task +from t.smoke.tests.stamping.tasks import wait_for_revoke +from t.smoke.workers.dev import SmokeWorkerContainer +from t.smoke.workers.legacy import CeleryLegacyWorkerContainer + + +@pytest.fixture +def dev_worker(celery_setup: CeleryTestSetup) -> CeleryTestWorker: + worker: CeleryTestWorker + for worker in celery_setup.worker_cluster: + if worker.version == SmokeWorkerContainer.version(): + return worker + return None + + 
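+# Like dev_worker above, this fixture walks the worker cluster and selects a
+# worker by its container version, here the legacy (Celery 4) worker. It
+# returns None instead of raising when the cluster has no such worker, so
+# tests whose clusters omit the legacy worker can still request the fixture.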
+@pytest.fixture +def legacy_worker(celery_setup: CeleryTestSetup) -> CeleryTestWorker: + worker: CeleryTestWorker + for worker in celery_setup.worker_cluster: + if worker.version == CeleryLegacyWorkerContainer.version(): + return worker + return None + + +class test_stamping: + def test_callback(self, dev_worker: CeleryTestWorker): + on_signature_stamp = {"on_signature_stamp": 4} + no_visitor_stamp = {"no_visitor_stamp": "Stamp without visitor"} + on_callback_stamp = {"on_callback_stamp": 2} + link_stamp = { + **on_signature_stamp, + **no_visitor_stamp, + **on_callback_stamp, + } + + class CustomStampingVisitor(StampingVisitor): + def on_signature(self, sig, **headers) -> dict: + return on_signature_stamp.copy() + + def on_callback(self, callback, **header) -> dict: + return on_callback_stamp.copy() + + stamped_task = identity.si(123).set(queue=dev_worker.worker_queue) + stamped_task.link( + add.s(0) + .stamp(no_visitor_stamp=no_visitor_stamp["no_visitor_stamp"]) + .set(queue=dev_worker.worker_queue) + ) + stamped_task.stamp(visitor=CustomStampingVisitor()) + stamped_task.delay().get(timeout=RESULT_TIMEOUT) + assert dev_worker.logs().count( + json.dumps(on_signature_stamp, indent=4, sort_keys=True) + ) + assert dev_worker.logs().count(json.dumps(link_stamp, indent=4, sort_keys=True)) + + +class test_stamping_hybrid_worker_cluster: + @pytest.fixture( + # Each param item is a list of workers to be used in the cluster + # and each cluster will be tested separately (with parallel support) + params=[ + ["celery_setup_worker"], + ["celery_setup_worker", "celery_legacy_worker"], + ] + ) + def celery_worker_cluster( + self, + request: pytest.FixtureRequest, + ) -> CeleryWorkerCluster: + nodes: tuple[CeleryTestWorker] = [ + request.getfixturevalue(worker) for worker in request.param + ] + cluster = CeleryWorkerCluster(*nodes) + yield cluster + cluster.teardown() + + def test_sanity(self, celery_setup: CeleryTestSetup): + stamp = {"stamp": 42} + + class CustomStampingVisitor(StampingVisitor): + def on_signature(self, sig, **headers) -> dict: + return stamp.copy() + + worker: CeleryTestWorker + for worker in celery_setup.worker_cluster: + queue = worker.worker_queue + stamped_task = identity.si(123) + stamped_task.stamp(visitor=CustomStampingVisitor()) + assert stamped_task.apply_async(queue=queue).get(timeout=RESULT_TIMEOUT) + assert worker.logs().count(json.dumps(stamp, indent=4, sort_keys=True)) + + def test_sanity_worker_hop(self, celery_setup: CeleryTestSetup): + if len(celery_setup.worker_cluster) < 2: + pytest.skip("Not enough workers in cluster") + + stamp = {"stamp": 42} + + class CustomStampingVisitor(StampingVisitor): + def on_signature(self, sig, **headers) -> dict: + return stamp.copy() + + w1: CeleryTestWorker = celery_setup.worker_cluster[0] + w2: CeleryTestWorker = celery_setup.worker_cluster[1] + stamped_task = chain( + identity.si(4).set(queue=w1.worker_queue), + identity.si(2).set(queue=w2.worker_queue), + ) + stamped_task.stamp(visitor=CustomStampingVisitor()) + stamped_task.apply_async().get(timeout=RESULT_TIMEOUT) + + stamp = json.dumps(stamp, indent=4) + worker: CeleryTestWorker + for worker in celery_setup.worker_cluster: + assert worker.logs().count(stamp) + + def test_multiple_stamps_multiple_workers(self, celery_setup: CeleryTestSetup): + if len(celery_setup.worker_cluster) < 2: + pytest.skip("Not enough workers in cluster") + + stamp = {"stamp": 420} + stamp1 = {**stamp, "stamp1": 4} + stamp2 = {**stamp, "stamp2": 2} + + w1: CeleryTestWorker = 
celery_setup.worker_cluster[0] + w2: CeleryTestWorker = celery_setup.worker_cluster[1] + stamped_task = chain( + identity.si(4).set(queue=w1.worker_queue).stamp(stamp1=stamp1["stamp1"]), + identity.si(2).set(queue=w2.worker_queue).stamp(stamp2=stamp2["stamp2"]), + ) + stamped_task.stamp(stamp=stamp["stamp"]) + stamped_task.apply_async().get(timeout=RESULT_TIMEOUT) + + stamp1 = json.dumps(stamp1, indent=4) + stamp2 = json.dumps(stamp2, indent=4) + + assert w1.logs().count(stamp1) + assert w1.logs().count(stamp2) == 0 + + assert w2.logs().count(stamp1) == 0 + assert w2.logs().count(stamp2) + + def test_stamping_on_replace_with_legacy_worker_in_cluster( + self, + celery_setup: CeleryTestSetup, + dev_worker: CeleryTestWorker, + legacy_worker: CeleryTestWorker, + ): + if len(celery_setup.worker_cluster) < 2: + pytest.skip("Not enough workers in cluster") + + stamp = {"stamp": "Only for dev worker tasks"} + stamp1 = {**StampOnReplace.stamp, "stamp1": "1) Only for legacy worker tasks"} + stamp2 = {**StampOnReplace.stamp, "stamp2": "2) Only for legacy worker tasks"} + + replaced_sig1 = ( + identity.si(4) + .set(queue=legacy_worker.worker_queue) + .stamp(stamp1=stamp1["stamp1"]) + ) + replaced_sig2 = ( + identity.si(2) + .set(queue=legacy_worker.worker_queue) + .stamp(stamp2=stamp2["stamp2"]) + ) + + stamped_task = chain( + replace_with_stamped_task.si(replace_with=replaced_sig1).set( + queue=dev_worker.worker_queue + ), + replace_with_stamped_task.si(replace_with=replaced_sig2).set( + queue=dev_worker.worker_queue + ), + ) + stamped_task.stamp(stamp=stamp["stamp"]) + stamped_task.apply_async().get(timeout=RESULT_TIMEOUT) + + stamp = json.dumps(stamp, indent=4) + stamp1 = json.dumps(stamp1, indent=4) + stamp2 = json.dumps(stamp2, indent=4) + + assert dev_worker.logs().count(stamp) + assert dev_worker.logs().count(stamp1) == 0 + assert dev_worker.logs().count(stamp2) == 0 + + assert legacy_worker.logs().count(stamp) == 0 + assert legacy_worker.logs().count(stamp1) + assert legacy_worker.logs().count(stamp2) + + +class test_revoke_by_stamped_headers: + @pytest.fixture + def celery_worker_cluster( + self, + celery_worker: CeleryTestWorker, + celery_latest_worker: CeleryTestWorker, + ) -> CeleryWorkerCluster: + cluster = CeleryWorkerCluster(celery_worker, celery_latest_worker) + yield cluster + cluster.teardown() + + @pytest.fixture + def celery_backend_cluster(self) -> CeleryBackendCluster: + # Disable backend + return None + + @pytest.fixture + def wait_for_revoke_timeout(self) -> int: + return 4 + + @pytest.fixture + def canvas( + self, + dev_worker: CeleryTestWorker, + wait_for_revoke_timeout: int, + ) -> Signature: + return chain( + identity.s(wait_for_revoke_timeout), + wait_for_revoke.s(waitfor_worker_queue=dev_worker.worker_queue).set( + queue=dev_worker.worker_queue + ), + ) + + def test_revoke_by_stamped_headers_after_publish( + self, + dev_worker: CeleryTestWorker, + celery_latest_worker: CeleryTestWorker, + wait_for_revoke_timeout: int, + canvas: Signature, + ): + result: AsyncResult = canvas.apply_async( + queue=celery_latest_worker.worker_queue + ) + result.revoke_by_stamped_headers(StampOnReplace.stamp, terminate=True) + dev_worker.assert_log_does_not_exist( + "Done waiting", + timeout=wait_for_revoke_timeout, + ) + + def test_revoke_by_stamped_headers_before_publish( + self, + dev_worker: CeleryTestWorker, + celery_latest_worker: CeleryTestWorker, + canvas: Signature, + ): + result = canvas.freeze() + result.revoke_by_stamped_headers(StampOnReplace.stamp) + result: AsyncResult = 
canvas.apply_async( + queue=celery_latest_worker.worker_queue + ) + dev_worker.assert_log_exists("Discarding revoked task") + dev_worker.assert_log_exists(f"revoked by header: {StampOnReplace.stamp}") diff --git a/t/smoke/test_canvas.py b/t/smoke/tests/test_canvas.py similarity index 100% rename from t/smoke/test_canvas.py rename to t/smoke/tests/test_canvas.py diff --git a/t/smoke/test_consumer.py b/t/smoke/tests/test_consumer.py similarity index 94% rename from t/smoke/test_consumer.py rename to t/smoke/tests/test_consumer.py index 04da3a1cdc7..5645f2689b8 100644 --- a/t/smoke/test_consumer.py +++ b/t/smoke/tests/test_consumer.py @@ -27,6 +27,9 @@ def default_worker_app(self, default_worker_app: Celery) -> Celery: @pytest.mark.parametrize("expected_running_tasks_count", range(1, WORKER_CONCURRENCY + 1)) def test_reducing_prefetch_count(self, celery_setup: CeleryTestSetup, expected_running_tasks_count: int): + if isinstance(celery_setup.broker, RedisTestBroker): + pytest.xfail("Potential Bug: Redis Broker Restart is unstable") + sig = group(long_running_task.s(420) for _ in range(expected_running_tasks_count)) sig.apply_async(queue=celery_setup.worker.worker_queue) celery_setup.broker.restart() @@ -47,6 +50,9 @@ def test_reducing_prefetch_count(self, celery_setup: CeleryTestSetup, expected_r celery_setup.worker.assert_log_exists(expected_prefetch_restore_message) def test_prefetch_count_restored(self, celery_setup: CeleryTestSetup): + if isinstance(celery_setup.broker, RedisTestBroker): + pytest.xfail("Potential Bug: Redis Broker Restart is unstable") + expected_running_tasks_count = MAX_PREFETCH * WORKER_PREFETCH_MULTIPLIER sig = group(long_running_task.s(10) for _ in range(expected_running_tasks_count)) sig.apply_async(queue=celery_setup.worker.worker_queue) diff --git a/t/smoke/test_control.py b/t/smoke/tests/test_control.py similarity index 100% rename from t/smoke/test_control.py rename to t/smoke/tests/test_control.py diff --git a/t/smoke/test_failover.py b/t/smoke/tests/test_failover.py similarity index 100% rename from t/smoke/test_failover.py rename to t/smoke/tests/test_failover.py diff --git a/t/smoke/test_signals.py b/t/smoke/tests/test_signals.py similarity index 100% rename from t/smoke/test_signals.py rename to t/smoke/tests/test_signals.py diff --git a/t/smoke/test_tasks.py b/t/smoke/tests/test_tasks.py similarity index 100% rename from t/smoke/test_tasks.py rename to t/smoke/tests/test_tasks.py From 1794c6e115b80f29a09384826b7d618204480de2 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sun, 3 Dec 2023 17:13:22 +0200 Subject: [PATCH 0604/1051] Increased stamping tests coverage + hotfixes (#8685) --- t/smoke/tests/stamping/test_stamping.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/t/smoke/tests/stamping/test_stamping.py b/t/smoke/tests/stamping/test_stamping.py index 8507f371955..fd10da44939 100644 --- a/t/smoke/tests/stamping/test_stamping.py +++ b/t/smoke/tests/stamping/test_stamping.py @@ -70,7 +70,9 @@ class test_stamping_hybrid_worker_cluster: # and each cluster will be tested separately (with parallel support) params=[ ["celery_setup_worker"], + ["celery_legacy_worker"], ["celery_setup_worker", "celery_legacy_worker"], + ["celery_setup_worker", "celery_latest_worker", "celery_legacy_worker"], ] ) def celery_worker_cluster( @@ -120,7 +122,7 @@ def on_signature(self, sig, **headers) -> dict: stamp = json.dumps(stamp, indent=4) worker: CeleryTestWorker - for worker in celery_setup.worker_cluster: + for worker in (w1, w2): assert 
worker.logs().count(stamp) def test_multiple_stamps_multiple_workers(self, celery_setup: CeleryTestSetup): @@ -252,10 +254,10 @@ def test_revoke_by_stamped_headers_before_publish( celery_latest_worker: CeleryTestWorker, canvas: Signature, ): - result = canvas.freeze() - result.revoke_by_stamped_headers(StampOnReplace.stamp) - result: AsyncResult = canvas.apply_async( - queue=celery_latest_worker.worker_queue + dev_worker.app.control.revoke_by_stamped_headers( + StampOnReplace.stamp, + terminate=True, ) + canvas.apply_async(queue=celery_latest_worker.worker_queue) dev_worker.assert_log_exists("Discarding revoked task") dev_worker.assert_log_exists(f"revoked by header: {StampOnReplace.stamp}") From b88b3d7e86e4e918ac32fbfce7cbc68d29693032 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sun, 3 Dec 2023 18:17:43 +0200 Subject: [PATCH 0605/1051] Added test_broker_failover::test_reconnect_to_main() (#8686) --- t/smoke/tests/test_failover.py | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/t/smoke/tests/test_failover.py b/t/smoke/tests/test_failover.py index 65d24ba5f63..bfcaa86a688 100644 --- a/t/smoke/tests/test_failover.py +++ b/t/smoke/tests/test_failover.py @@ -32,10 +32,21 @@ def celery_broker_cluster( cluster.teardown() -class test_failover: - def test_sanity(self, celery_setup: CeleryTestSetup): +class test_broker_failover: + def test_killing_first_broker(self, celery_setup: CeleryTestSetup): assert len(celery_setup.broker_cluster) > 1 celery_setup.broker.kill() expected = "test_broker_failover" res = identity.s(expected).apply_async(queue=celery_setup.worker.worker_queue) assert res.get(timeout=RESULT_TIMEOUT) == expected + + def test_reconnect_to_main(self, celery_setup: CeleryTestSetup): + assert len(celery_setup.broker_cluster) > 1 + celery_setup.broker_cluster[0].kill() + expected = "test_broker_failover" + res = identity.s(expected).apply_async(queue=celery_setup.worker.worker_queue) + assert res.get(timeout=RESULT_TIMEOUT) == expected + celery_setup.broker_cluster[1].kill() + celery_setup.broker_cluster[0].restart() + res = identity.s(expected).apply_async(queue=celery_setup.worker.worker_queue) + assert res.get(timeout=RESULT_TIMEOUT) == expected From 94aaade1f8aeab302522d7ad7f33cec1664955f6 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 4 Dec 2023 01:11:03 +0200 Subject: [PATCH 0606/1051] Initial worker restart smoke tests (#8693) * Added t/smoke/tests/test_worker.py * Added another worker restart method: docker_restart_force --- .github/workflows/python-package.yml | 2 +- t/smoke/tasks.py | 17 +++++++- t/smoke/tests/test_worker.py | 60 ++++++++++++++++++++++++++++ tox.ini | 2 +- 4 files changed, 77 insertions(+), 4 deletions(-) create mode 100644 t/smoke/tests/test_worker.py diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 7e555144da6..c6d01374b38 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -157,4 +157,4 @@ jobs: timeout-minutes: 30 run: > tox --verbose --verbose -e - "${{ matrix.python-version }}-smoke" -- --reruns 5 --reruns-delay 10 --rerun-except AssertionError -n auto + "${{ matrix.python-version }}-smoke" -- -n auto diff --git a/t/smoke/tasks.py b/t/smoke/tasks.py index 99ef9eb4751..e5e8fac92d5 100644 --- a/t/smoke/tasks.py +++ b/t/smoke/tasks.py @@ -13,8 +13,21 @@ def noop(*args, **kwargs) -> None: @shared_task -def long_running_task(seconds: float = 1) -> bool: - sleep(seconds) +def long_running_task(seconds: float = 1, verbose: 
bool = False) -> bool: + from celery import current_task + from celery.utils.log import get_task_logger + + logger = get_task_logger(current_task.name) + + logger.info('Starting long running task') + + for i in range(0, int(seconds)): + sleep(1) + if verbose: + logger.info(f'Sleeping: {i}') + + logger.info('Finished long running task') + return True diff --git a/t/smoke/tests/test_worker.py b/t/smoke/tests/test_worker.py new file mode 100644 index 00000000000..f88c6c4119c --- /dev/null +++ b/t/smoke/tests/test_worker.py @@ -0,0 +1,60 @@ +import pytest +from pytest_celery import RESULT_TIMEOUT, CeleryTestSetup + +from celery import Celery +from celery.canvas import chain +from t.smoke.tasks import long_running_task + + +@pytest.mark.parametrize( + "restart_method", + [ + "pool_restart", + "docker_restart_gracefully", + "docker_restart_force", + ], +) +class test_worker_restart: + @pytest.fixture + def default_worker_app(self, default_worker_app: Celery) -> Celery: + app = default_worker_app + app.conf.worker_pool_restarts = True + app.conf.task_acks_late = True + yield app + + def test_restart_during_task_execution( + self, + celery_setup: CeleryTestSetup, + restart_method: str, + ): + queue = celery_setup.worker.worker_queue + sig = long_running_task.si(5, verbose=True).set(queue=queue) + res = sig.delay() + if restart_method == "pool_restart": + celery_setup.app.control.pool_restart() + elif restart_method == "docker_restart_gracefully": + celery_setup.worker.restart() + elif restart_method == "docker_restart_force": + celery_setup.worker.restart(force=True) + assert res.get(RESULT_TIMEOUT) is True + + def test_restart_between_task_execution( + self, + celery_setup: CeleryTestSetup, + restart_method: str, + ): + queue = celery_setup.worker.worker_queue + first = long_running_task.si(5, verbose=True).set(queue=queue) + first_res = first.freeze() + second = long_running_task.si(5, verbose=True).set(queue=queue) + second_res = second.freeze() + sig = chain(first, second) + sig.delay() + assert first_res.get(RESULT_TIMEOUT) is True + if restart_method == "pool_restart": + celery_setup.app.control.pool_restart() + elif restart_method == "docker_restart_gracefully": + celery_setup.worker.restart() + elif restart_method == "docker_restart_force": + celery_setup.worker.restart(force=True) + assert second_res.get(RESULT_TIMEOUT) is True diff --git a/tox.ini b/tox.ini index cc5087b3e03..e4b27ef70c7 100644 --- a/tox.ini +++ b/tox.ini @@ -46,7 +46,7 @@ deps= commands = unit: pytest --maxfail=10 --capture=no -v --cov=celery --cov-report=xml --cov-report term {posargs} integration: pytest -xsv t/integration {posargs} - smoke: pytest -xsv t/smoke {posargs} + smoke: pytest -xsv t/smoke --reruns 5 --reruns-delay 10 --rerun-except AssertionError {posargs} setenv = PIP_EXTRA_INDEX_URL=https://celery.github.io/celery-wheelhouse/repo/simple/ BOTO_CONFIG = /dev/null From 570beabd7b1506db5d0a2ac236849c7c4d17915e Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 4 Dec 2023 01:40:01 +0200 Subject: [PATCH 0607/1051] Removed backend from setup in t/smoke/tests/test_signals.py (Optimization) (#8694) --- t/smoke/tests/test_signals.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/t/smoke/tests/test_signals.py b/t/smoke/tests/test_signals.py index c3b6210eb2b..17e9eae9406 100644 --- a/t/smoke/tests/test_signals.py +++ b/t/smoke/tests/test_signals.py @@ -1,5 +1,5 @@ import pytest -from pytest_celery import CeleryTestSetup +from pytest_celery import CeleryBackendCluster, CeleryTestSetup 
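+# CeleryBackendCluster is imported for the fixture below, which disables the
+# result backend as an optimization, since the signal tests here do not need
+# task results.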
from celery.signals import after_task_publish, before_task_publish from t.smoke.tasks import noop @@ -13,6 +13,12 @@ def default_worker_signals(default_worker_signals: set) -> set: yield default_worker_signals +@pytest.fixture +def celery_backend_cluster() -> CeleryBackendCluster: + # Disable backend + return None + + class test_signals: @pytest.mark.parametrize( "log, control", From 31c23c53ba1b94dc207bbfeade7279bede3c4e86 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 5 Dec 2023 22:00:34 +0200 Subject: [PATCH 0608/1051] Added initial worker failover smoke tests (#8695) * Added alternative dev container with shared queue with the smoke tests worker (the default dev worker) * Added t/smoke/tests/failover/test_worker_failover.py * Added test_worker_failover::test_task_retry_on_worker_crash() * Added "memory_limit" termination method to class test_worker_failover * Cleanup * Added comments --- t/smoke/conftest.py | 1 + t/smoke/tasks.py | 19 +++- .../test_broker_failover.py} | 0 .../tests/failover/test_worker_failover.py | 95 +++++++++++++++++++ t/smoke/workers/alt.py | 37 ++++++++ 5 files changed, 148 insertions(+), 4 deletions(-) rename t/smoke/tests/{test_failover.py => failover/test_broker_failover.py} (100%) create mode 100644 t/smoke/tests/failover/test_worker_failover.py create mode 100644 t/smoke/workers/alt.py diff --git a/t/smoke/conftest.py b/t/smoke/conftest.py index 14954053654..fc461d8c361 100644 --- a/t/smoke/conftest.py +++ b/t/smoke/conftest.py @@ -4,6 +4,7 @@ from pytest_celery import REDIS_CONTAINER_TIMEOUT, REDIS_ENV, REDIS_IMAGE, REDIS_PORTS, RedisContainer from pytest_docker_tools import container, fetch, network +from t.smoke.workers.alt import * # noqa from t.smoke.workers.dev import * # noqa from t.smoke.workers.latest import * # noqa from t.smoke.workers.legacy import * # noqa diff --git a/t/smoke/tasks.py b/t/smoke/tasks.py index e5e8fac92d5..301d36652ee 100644 --- a/t/smoke/tasks.py +++ b/t/smoke/tasks.py @@ -1,3 +1,6 @@ +from __future__ import annotations + +from sys import getsizeof from time import sleep import celery.utils @@ -13,20 +16,28 @@ def noop(*args, **kwargs) -> None: @shared_task -def long_running_task(seconds: float = 1, verbose: bool = False) -> bool: +def long_running_task( + seconds: float = 1, + verbose: bool = False, + allocate: int | None = None, +) -> bool: from celery import current_task from celery.utils.log import get_task_logger logger = get_task_logger(current_task.name) - logger.info('Starting long running task') + logger.info("Starting long running task") + + if allocate: + # Attempt to allocate megabytes in memory + _ = [0] * (allocate * 1024 * 1024 // getsizeof(int())) for i in range(0, int(seconds)): sleep(1) if verbose: - logger.info(f'Sleeping: {i}') + logger.info(f"Sleeping: {i}") - logger.info('Finished long running task') + logger.info("Finished long running task") return True diff --git a/t/smoke/tests/test_failover.py b/t/smoke/tests/failover/test_broker_failover.py similarity index 100% rename from t/smoke/tests/test_failover.py rename to t/smoke/tests/failover/test_broker_failover.py diff --git a/t/smoke/tests/failover/test_worker_failover.py b/t/smoke/tests/failover/test_worker_failover.py new file mode 100644 index 00000000000..625a1255268 --- /dev/null +++ b/t/smoke/tests/failover/test_worker_failover.py @@ -0,0 +1,95 @@ +from __future__ import annotations + +import pytest +from pytest_celery import CeleryTestSetup, CeleryTestWorker, CeleryWorkerCluster, RedisTestBroker + +from celery import Celery +from 
t.smoke.tasks import long_running_task + + +@pytest.fixture +def celery_worker_cluster( + celery_worker: CeleryTestWorker, + celery_alt_dev_worker: CeleryTestWorker, +) -> CeleryWorkerCluster: + cluster = CeleryWorkerCluster(celery_worker, celery_alt_dev_worker) + yield cluster + cluster.teardown() + + +@pytest.mark.parametrize( + "termination_method", + [ + "SIGKILL", + "control.shutdown", + "memory_limit", + ], +) +class test_worker_failover: + @pytest.fixture + def default_worker_app(self, default_worker_app: Celery) -> Celery: + app = default_worker_app + app.conf.task_acks_late = True + app.conf.worker_max_memory_per_child = 10 * 1024 # Limit to 10MB + if app.conf.broker_url.startswith("redis"): + app.conf.broker_transport_options = {"visibility_timeout": 1} + yield app + + def terminate(self, worker: CeleryTestWorker, method: str): + if method == "SIGKILL": + # Reduces actual workers count by 1 + worker.kill() + elif method == "control.shutdown": + # Completes the task and then shuts down the worker + worker.app.control.broadcast("shutdown", destination=[worker.hostname()]) + elif method == "memory_limit": + # Child process is killed and a new one is spawned, but the worker is not terminated + allocate = worker.app.conf.worker_max_memory_per_child * 1_000_000_000 + sig = long_running_task.si(allocate=allocate).set(queue=worker.worker_queue) + sig.delay() + + def test_killing_first_worker( + self, + celery_setup: CeleryTestSetup, + termination_method: str, + ): + queue = celery_setup.worker.worker_queue + sig = long_running_task.si(1).set(queue=queue) + res = sig.delay() + assert res.get(timeout=2) is True + self.terminate(celery_setup.worker, termination_method) + sig = long_running_task.si(1).set(queue=queue) + res = sig.delay() + assert res.get(timeout=2) is True + + def test_reconnect_to_restarted_worker( + self, + celery_setup: CeleryTestSetup, + termination_method: str, + ): + queue = celery_setup.worker.worker_queue + sig = long_running_task.si(1).set(queue=queue) + res = sig.delay() + assert res.get(timeout=10) is True + for worker in celery_setup.worker_cluster: + self.terminate(worker, termination_method) + celery_setup.worker.restart() + sig = long_running_task.si(1).set(queue=queue) + res = sig.delay() + assert res.get(timeout=10) is True + + def test_task_retry_on_worker_crash( + self, + celery_setup: CeleryTestSetup, + termination_method: str, + ): + if isinstance(celery_setup.broker, RedisTestBroker): + pytest.xfail("Potential Bug: works with RabbitMQ, but not Redis") + + sleep_time = 4 + queue = celery_setup.worker.worker_queue + sig = long_running_task.si(sleep_time, verbose=True).set(queue=queue) + res = sig.apply_async(retry=True, retry_policy={"max_retries": 1}) + celery_setup.worker.wait_for_log("Sleeping: 2") # Wait for the task to run a bit + self.terminate(celery_setup.worker, termination_method) + assert res.get(timeout=10) is True diff --git a/t/smoke/workers/alt.py b/t/smoke/workers/alt.py new file mode 100644 index 00000000000..b333f2616e3 --- /dev/null +++ b/t/smoke/workers/alt.py @@ -0,0 +1,37 @@ +import os + +import pytest +from pytest_celery import CeleryTestWorker, defaults +from pytest_docker_tools import container, fxtr + +from celery import Celery +from t.smoke.workers.dev import SmokeWorkerContainer + +# Allows having two different workers with the same queue and settings +# that are based on the current codebase +alt_dev_worker_container = container( + image="{celery_dev_worker_image.id}", + environment=fxtr("default_worker_env"), + 
network="{default_pytest_celery_network.name}", + volumes={ + # Volume: Worker /app + "{default_worker_volume.name}": defaults.DEFAULT_WORKER_VOLUME, + # Mount: Celery source + os.path.abspath(os.getcwd()): { + "bind": "/celery", + "mode": "rw", + }, + }, + wrapper_class=SmokeWorkerContainer, + timeout=defaults.DEFAULT_WORKER_CONTAINER_TIMEOUT, +) + + +@pytest.fixture +def celery_alt_dev_worker( + alt_dev_worker_container: SmokeWorkerContainer, + celery_setup_app: Celery, +) -> CeleryTestWorker: + worker = CeleryTestWorker(alt_dev_worker_container, app=celery_setup_app) + yield worker + worker.teardown() From b7433b8a076ccde903036456eab4a3068b4acdeb Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 6 Dec 2023 14:13:10 +0200 Subject: [PATCH 0609/1051] Hotfix to test_worker_failover.terminate() (#8698) * Use type annotation for control command * control.broadcast() -> control.shutdown() --- t/smoke/tests/failover/test_worker_failover.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/t/smoke/tests/failover/test_worker_failover.py b/t/smoke/tests/failover/test_worker_failover.py index 625a1255268..1e4b535b63f 100644 --- a/t/smoke/tests/failover/test_worker_failover.py +++ b/t/smoke/tests/failover/test_worker_failover.py @@ -4,6 +4,7 @@ from pytest_celery import CeleryTestSetup, CeleryTestWorker, CeleryWorkerCluster, RedisTestBroker from celery import Celery +from celery.app.control import Control from t.smoke.tasks import long_running_task @@ -41,7 +42,8 @@ def terminate(self, worker: CeleryTestWorker, method: str): worker.kill() elif method == "control.shutdown": # Completes the task and then shuts down the worker - worker.app.control.broadcast("shutdown", destination=[worker.hostname()]) + control: Control = worker.app.control + control.shutdown(destination=[worker.hostname()]) elif method == "memory_limit": # Child process is killed and a new one is spawned, but the worker is not terminated allocate = worker.app.conf.worker_max_memory_per_child * 1_000_000_000 From 76acdb326ac7250ee409f01fa0287efcd9827592 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 7 Dec 2023 19:13:30 +0200 Subject: [PATCH 0610/1051] Fixed default_worker_tasks() in t/smoke/conftest.py (#8704) --- t/smoke/conftest.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/t/smoke/conftest.py b/t/smoke/conftest.py index fc461d8c361..68383dfd4d6 100644 --- a/t/smoke/conftest.py +++ b/t/smoke/conftest.py @@ -11,14 +11,13 @@ @pytest.fixture -def default_worker_tasks() -> set: +def default_worker_tasks(default_worker_tasks: set) -> set: from t.integration import tasks as integration_tests_tasks from t.smoke import tasks as smoke_tests_tasks - yield { - integration_tests_tasks, - smoke_tests_tasks, - } + default_worker_tasks.add(integration_tests_tasks) + default_worker_tasks.add(smoke_tests_tasks) + yield default_worker_tasks redis_image = fetch(repository=REDIS_IMAGE) From 97b7656348485f4e1f296419a8af562e736676bd Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sun, 10 Dec 2023 19:59:32 +0200 Subject: [PATCH 0611/1051] Refactored worker smoke tests (#8708) --- t/smoke/tasks.py | 8 ++- t/smoke/tests/conftest.py | 63 +++++++++++++++++++ .../tests/failover/test_worker_failover.py | 33 +++------- t/smoke/tests/test_worker.py | 27 +++----- 4 files changed, 89 insertions(+), 42 deletions(-) create mode 100644 t/smoke/tests/conftest.py diff --git a/t/smoke/tasks.py b/t/smoke/tasks.py index 301d36652ee..d7b3f929461 100644 --- a/t/smoke/tasks.py +++ b/t/smoke/tasks.py @@ -20,6 
+20,7 @@ def long_running_task( seconds: float = 1, verbose: bool = False, allocate: int | None = None, + exhaust_memory: bool = False, ) -> bool: from celery import current_task from celery.utils.log import get_task_logger @@ -30,7 +31,12 @@ def long_running_task( if allocate: # Attempt to allocate megabytes in memory - _ = [0] * (allocate * 1024 * 1024 // getsizeof(int())) + _ = [0] * (allocate * 10**6 // getsizeof(int())) + + if exhaust_memory: + mem = [] + while True: + mem.append(' ' * 10**6) # 1 MB of spaces for i in range(0, int(seconds)): sleep(1) diff --git a/t/smoke/tests/conftest.py b/t/smoke/tests/conftest.py new file mode 100644 index 00000000000..16f550c9167 --- /dev/null +++ b/t/smoke/tests/conftest.py @@ -0,0 +1,63 @@ +from __future__ import annotations + +from enum import Enum, auto + +from billiard.exceptions import WorkerLostError +from pytest_celery import CeleryTestSetup, CeleryTestWorker + +from celery.app.control import Control +from t.smoke.tasks import long_running_task + + +class WorkerOperations: + class TerminationMethod(Enum): + SIGKILL = auto() + CONTROL_SHUTDOWN = auto() + MAX_MEMORY_ALLOCATED = auto() + MEMORY_LIMIT_EXCEEDED = auto() + + class RestartMethod(Enum): + POOL_RESTART = auto() + DOCKER_RESTART_GRACEFULLY = auto() + DOCKER_RESTART_FORCE = auto() + + def terminate(self, worker: CeleryTestWorker, method: TerminationMethod): + if method == WorkerOperations.TerminationMethod.SIGKILL: + worker.kill() + return + + if method == WorkerOperations.TerminationMethod.CONTROL_SHUTDOWN: + control: Control = worker.app.control + control.shutdown(destination=[worker.hostname()]) + return + + if method == WorkerOperations.TerminationMethod.MAX_MEMORY_ALLOCATED: + allocate = worker.app.conf.worker_max_memory_per_child * 10**6 + try: + ( + long_running_task.si(allocate=allocate) + .apply_async(queue=worker.worker_queue) + .get() + ) + except MemoryError: + return + + if method == WorkerOperations.TerminationMethod.MEMORY_LIMIT_EXCEEDED: + try: + ( + long_running_task.si(exhaust_memory=True) + .apply_async(queue=worker.worker_queue) + .get() + ) + except WorkerLostError: + return + + assert False + + def restart(self, celery_setup: CeleryTestSetup, method: RestartMethod): + if method == WorkerOperations.RestartMethod.POOL_RESTART: + celery_setup.app.control.pool_restart() + elif method == WorkerOperations.RestartMethod.DOCKER_RESTART_GRACEFULLY: + celery_setup.worker.restart() + elif method == WorkerOperations.RestartMethod.DOCKER_RESTART_FORCE: + celery_setup.worker.restart(force=True) diff --git a/t/smoke/tests/failover/test_worker_failover.py b/t/smoke/tests/failover/test_worker_failover.py index 1e4b535b63f..b555054e38f 100644 --- a/t/smoke/tests/failover/test_worker_failover.py +++ b/t/smoke/tests/failover/test_worker_failover.py @@ -4,8 +4,8 @@ from pytest_celery import CeleryTestSetup, CeleryTestWorker, CeleryWorkerCluster, RedisTestBroker from celery import Celery -from celery.app.control import Control from t.smoke.tasks import long_running_task +from t.smoke.tests.conftest import WorkerOperations @pytest.fixture @@ -21,12 +21,13 @@ def celery_worker_cluster( @pytest.mark.parametrize( "termination_method", [ - "SIGKILL", - "control.shutdown", - "memory_limit", + WorkerOperations.TerminationMethod.SIGKILL, + WorkerOperations.TerminationMethod.CONTROL_SHUTDOWN, + WorkerOperations.TerminationMethod.MAX_MEMORY_ALLOCATED, + WorkerOperations.TerminationMethod.MEMORY_LIMIT_EXCEEDED, ], ) -class test_worker_failover: +class test_worker_failover(WorkerOperations): 
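+    # WorkerOperations (added in t/smoke/tests/conftest.py above) supplies the
+    # shared terminate() helper and the TerminationMethod enum used by the
+    # parametrized termination tests in this class.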
@pytest.fixture def default_worker_app(self, default_worker_app: Celery) -> Celery: app = default_worker_app @@ -36,24 +37,10 @@ def default_worker_app(self, default_worker_app: Celery) -> Celery: app.conf.broker_transport_options = {"visibility_timeout": 1} yield app - def terminate(self, worker: CeleryTestWorker, method: str): - if method == "SIGKILL": - # Reduces actual workers count by 1 - worker.kill() - elif method == "control.shutdown": - # Completes the task and then shuts down the worker - control: Control = worker.app.control - control.shutdown(destination=[worker.hostname()]) - elif method == "memory_limit": - # Child process is killed and a new one is spawned, but the worker is not terminated - allocate = worker.app.conf.worker_max_memory_per_child * 1_000_000_000 - sig = long_running_task.si(allocate=allocate).set(queue=worker.worker_queue) - sig.delay() - def test_killing_first_worker( self, celery_setup: CeleryTestSetup, - termination_method: str, + termination_method: WorkerOperations.TerminationMethod, ): queue = celery_setup.worker.worker_queue sig = long_running_task.si(1).set(queue=queue) @@ -67,7 +54,7 @@ def test_killing_first_worker( def test_reconnect_to_restarted_worker( self, celery_setup: CeleryTestSetup, - termination_method: str, + termination_method: WorkerOperations.TerminationMethod, ): queue = celery_setup.worker.worker_queue sig = long_running_task.si(1).set(queue=queue) @@ -83,7 +70,7 @@ def test_reconnect_to_restarted_worker( def test_task_retry_on_worker_crash( self, celery_setup: CeleryTestSetup, - termination_method: str, + termination_method: WorkerOperations.TerminationMethod, ): if isinstance(celery_setup.broker, RedisTestBroker): pytest.xfail("Potential Bug: works with RabbitMQ, but not Redis") @@ -92,6 +79,6 @@ def test_task_retry_on_worker_crash( queue = celery_setup.worker.worker_queue sig = long_running_task.si(sleep_time, verbose=True).set(queue=queue) res = sig.apply_async(retry=True, retry_policy={"max_retries": 1}) - celery_setup.worker.wait_for_log("Sleeping: 2") # Wait for the task to run a bit + celery_setup.worker.wait_for_log("Sleeping: 2") # Let task run self.terminate(celery_setup.worker, termination_method) assert res.get(timeout=10) is True diff --git a/t/smoke/tests/test_worker.py b/t/smoke/tests/test_worker.py index f88c6c4119c..8a2713c9179 100644 --- a/t/smoke/tests/test_worker.py +++ b/t/smoke/tests/test_worker.py @@ -4,17 +4,18 @@ from celery import Celery from celery.canvas import chain from t.smoke.tasks import long_running_task +from t.smoke.tests.conftest import WorkerOperations @pytest.mark.parametrize( "restart_method", [ - "pool_restart", - "docker_restart_gracefully", - "docker_restart_force", + WorkerOperations.RestartMethod.POOL_RESTART, + WorkerOperations.RestartMethod.DOCKER_RESTART_GRACEFULLY, + WorkerOperations.RestartMethod.DOCKER_RESTART_FORCE, ], ) -class test_worker_restart: +class test_worker_restart(WorkerOperations): @pytest.fixture def default_worker_app(self, default_worker_app: Celery) -> Celery: app = default_worker_app @@ -25,23 +26,18 @@ def default_worker_app(self, default_worker_app: Celery) -> Celery: def test_restart_during_task_execution( self, celery_setup: CeleryTestSetup, - restart_method: str, + restart_method: WorkerOperations.RestartMethod, ): queue = celery_setup.worker.worker_queue sig = long_running_task.si(5, verbose=True).set(queue=queue) res = sig.delay() - if restart_method == "pool_restart": - celery_setup.app.control.pool_restart() - elif restart_method == 
"docker_restart_gracefully": - celery_setup.worker.restart() - elif restart_method == "docker_restart_force": - celery_setup.worker.restart(force=True) + self.restart(celery_setup, restart_method) assert res.get(RESULT_TIMEOUT) is True def test_restart_between_task_execution( self, celery_setup: CeleryTestSetup, - restart_method: str, + restart_method: WorkerOperations.RestartMethod, ): queue = celery_setup.worker.worker_queue first = long_running_task.si(5, verbose=True).set(queue=queue) @@ -51,10 +47,5 @@ def test_restart_between_task_execution( sig = chain(first, second) sig.delay() assert first_res.get(RESULT_TIMEOUT) is True - if restart_method == "pool_restart": - celery_setup.app.control.pool_restart() - elif restart_method == "docker_restart_gracefully": - celery_setup.worker.restart() - elif restart_method == "docker_restart_force": - celery_setup.worker.restart(force=True) + self.restart(celery_setup, restart_method) assert second_res.get(RESULT_TIMEOUT) is True From 11732bd06e2332df395aeb79f8d764d59ef37a50 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sun, 10 Dec 2023 23:41:09 +0200 Subject: [PATCH 0612/1051] Hotfix (#8710) * Run all tests in CI together (to be reverted) * Changed celery_alt_dev_worker name from smoke_tests_worker -> alt_smoke_tests_worker * Refactored stamping smoke tests --- .github/workflows/python-package.yml | 8 +- t/smoke/conftest.py | 2 +- t/smoke/tests/stamping/conftest.py | 23 ++ t/smoke/tests/stamping/test_hybrid_cluster.py | 160 +++++++++++ t/smoke/tests/stamping/test_revoke.py | 75 +++++ t/smoke/tests/stamping/test_stamping.py | 263 ------------------ t/smoke/tests/stamping/test_visitor.py | 40 +++ .../{ => tests/stamping}/workers/legacy.py | 12 +- t/smoke/tests/test_tasks.py | 4 +- t/smoke/workers/alt.py | 27 +- t/smoke/workers/other.py | 56 ++++ 11 files changed, 388 insertions(+), 282 deletions(-) create mode 100644 t/smoke/tests/stamping/test_hybrid_cluster.py create mode 100644 t/smoke/tests/stamping/test_revoke.py delete mode 100644 t/smoke/tests/stamping/test_stamping.py create mode 100644 t/smoke/tests/stamping/test_visitor.py rename t/smoke/{ => tests/stamping}/workers/legacy.py (80%) create mode 100644 t/smoke/workers/other.py diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index c6d01374b38..d68297ea641 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -70,10 +70,10 @@ jobs: verbose: true # optional (default = false) Integration: - needs: - - Unit - if: needs.Unit.result == 'success' - timeout-minutes: 240 + # needs: + # - Unit + # if: needs.Unit.result == 'success' + # timeout-minutes: 240 runs-on: ubuntu-latest strategy: diff --git a/t/smoke/conftest.py b/t/smoke/conftest.py index 68383dfd4d6..f7ed5436790 100644 --- a/t/smoke/conftest.py +++ b/t/smoke/conftest.py @@ -7,7 +7,7 @@ from t.smoke.workers.alt import * # noqa from t.smoke.workers.dev import * # noqa from t.smoke.workers.latest import * # noqa -from t.smoke.workers.legacy import * # noqa +from t.smoke.workers.other import * # noqa @pytest.fixture diff --git a/t/smoke/tests/stamping/conftest.py b/t/smoke/tests/stamping/conftest.py index 0838a7a6ca0..db7e86ae030 100644 --- a/t/smoke/tests/stamping/conftest.py +++ b/t/smoke/tests/stamping/conftest.py @@ -1,4 +1,9 @@ import pytest +from pytest_celery import CeleryTestSetup, CeleryTestWorker + +from t.smoke.tests.stamping.workers.legacy import * # noqa +from t.smoke.tests.stamping.workers.legacy import LegacyWorkerContainer +from 
t.smoke.workers.dev import SmokeWorkerContainer @pytest.fixture @@ -15,3 +20,21 @@ def default_worker_signals(default_worker_signals: set) -> set: default_worker_signals.add(signals) yield default_worker_signals + + +@pytest.fixture +def dev_worker(celery_setup: CeleryTestSetup) -> CeleryTestWorker: + worker: CeleryTestWorker + for worker in celery_setup.worker_cluster: + if worker.version == SmokeWorkerContainer.version(): + return worker + return None + + +@pytest.fixture +def legacy_worker(celery_setup: CeleryTestSetup) -> CeleryTestWorker: + worker: CeleryTestWorker + for worker in celery_setup.worker_cluster: + if worker.version == LegacyWorkerContainer.version(): + return worker + return None diff --git a/t/smoke/tests/stamping/test_hybrid_cluster.py b/t/smoke/tests/stamping/test_hybrid_cluster.py new file mode 100644 index 00000000000..4e5af7a3e03 --- /dev/null +++ b/t/smoke/tests/stamping/test_hybrid_cluster.py @@ -0,0 +1,160 @@ +from __future__ import annotations + +import json + +import pytest +from pytest_celery import RESULT_TIMEOUT, CeleryTestSetup, CeleryTestWorker, CeleryWorkerCluster + +from celery.canvas import StampingVisitor, chain +from t.integration.tasks import StampOnReplace, identity, replace_with_stamped_task + + +def get_hybrid_clusters_matrix() -> list[list[str]]: + """Returns a matrix of hybrid worker clusters + + Each item in the matrix is a list of workers to be used in the cluster + and each cluster will be tested separately (with parallel support) + """ + + return [ + # Dev worker only + ["celery_setup_worker"], + # Legacy (Celery 4) worker only + ["celery_legacy_worker"], + # Both dev and legacy workers + ["celery_setup_worker", "celery_legacy_worker"], + # Dev worker and last official Celery release worker + ["celery_setup_worker", "celery_latest_worker"], + # Dev worker and legacy worker and last official Celery release worker + ["celery_setup_worker", "celery_latest_worker", "celery_legacy_worker"], + ] + + +@pytest.fixture(params=get_hybrid_clusters_matrix()) +def celery_worker_cluster(request: pytest.FixtureRequest) -> CeleryWorkerCluster: + nodes: tuple[CeleryTestWorker] = [ + request.getfixturevalue(worker) for worker in request.param + ] + cluster = CeleryWorkerCluster(*nodes) + yield cluster + cluster.teardown() + + +class test_stamping_hybrid_worker_cluster: + def test_sanity(self, celery_setup: CeleryTestSetup): + stamp = {"stamp": 42} + + class CustomStampingVisitor(StampingVisitor): + def on_signature(self, sig, **headers) -> dict: + return stamp.copy() + + worker: CeleryTestWorker + for worker in celery_setup.worker_cluster: + queue = worker.worker_queue + stamped_task = identity.si(123) + stamped_task.stamp(visitor=CustomStampingVisitor()) + assert stamped_task.apply_async(queue=queue).get(timeout=RESULT_TIMEOUT) + assert worker.logs().count(json.dumps(stamp, indent=4, sort_keys=True)) + + def test_sanity_worker_hop(self, celery_setup: CeleryTestSetup): + if len(celery_setup.worker_cluster) < 2: + pytest.skip("Not enough workers in cluster") + + stamp = {"stamp": 42} + + class CustomStampingVisitor(StampingVisitor): + def on_signature(self, sig, **headers) -> dict: + return stamp.copy() + + w1: CeleryTestWorker = celery_setup.worker_cluster[0] + w2: CeleryTestWorker = celery_setup.worker_cluster[1] + stamped_task = chain( + identity.si(4).set(queue=w1.worker_queue), + identity.si(2).set(queue=w2.worker_queue), + ) + stamped_task.stamp(visitor=CustomStampingVisitor()) + stamped_task.apply_async().get(timeout=RESULT_TIMEOUT) + + stamp = 
json.dumps(stamp, indent=4) + worker: CeleryTestWorker + for worker in (w1, w2): + assert worker.logs().count(stamp) + + def test_multiple_stamps_multiple_workers(self, celery_setup: CeleryTestSetup): + if len(celery_setup.worker_cluster) < 2: + pytest.skip("Not enough workers in cluster") + + stamp = {"stamp": 420} + stamp1 = {**stamp, "stamp1": 4} + stamp2 = {**stamp, "stamp2": 2} + + w1: CeleryTestWorker = celery_setup.worker_cluster[0] + w2: CeleryTestWorker = celery_setup.worker_cluster[1] + stamped_task = chain( + identity.si(4).set(queue=w1.worker_queue).stamp(stamp1=stamp1["stamp1"]), + identity.si(2).set(queue=w2.worker_queue).stamp(stamp2=stamp2["stamp2"]), + ) + stamped_task.stamp(stamp=stamp["stamp"]) + stamped_task.apply_async().get(timeout=RESULT_TIMEOUT) + + stamp1 = json.dumps(stamp1, indent=4) + stamp2 = json.dumps(stamp2, indent=4) + + assert w1.logs().count(stamp1) + assert w1.logs().count(stamp2) == 0 + + assert w2.logs().count(stamp1) == 0 + assert w2.logs().count(stamp2) + + def test_stamping_on_replace_with_legacy_worker_in_cluster( + self, + celery_setup: CeleryTestSetup, + dev_worker: CeleryTestWorker, + legacy_worker: CeleryTestWorker, + ): + if len(celery_setup.worker_cluster) < 2: + pytest.skip("Not enough workers in cluster") + + if not dev_worker: + pytest.skip("Dev worker not in cluster") + + if not legacy_worker: + pytest.skip("Legacy worker not in cluster") + + stamp = {"stamp": "Only for dev worker tasks"} + stamp1 = {**StampOnReplace.stamp, "stamp1": "1) Only for legacy worker tasks"} + stamp2 = {**StampOnReplace.stamp, "stamp2": "2) Only for legacy worker tasks"} + + replaced_sig1 = ( + identity.si(4) + .set(queue=legacy_worker.worker_queue) + .stamp(stamp1=stamp1["stamp1"]) + ) + replaced_sig2 = ( + identity.si(2) + .set(queue=legacy_worker.worker_queue) + .stamp(stamp2=stamp2["stamp2"]) + ) + + stamped_task = chain( + replace_with_stamped_task.si(replace_with=replaced_sig1).set( + queue=dev_worker.worker_queue + ), + replace_with_stamped_task.si(replace_with=replaced_sig2).set( + queue=dev_worker.worker_queue + ), + ) + stamped_task.stamp(stamp=stamp["stamp"]) + stamped_task.apply_async().get(timeout=RESULT_TIMEOUT) + + stamp = json.dumps(stamp, indent=4) + stamp1 = json.dumps(stamp1, indent=4) + stamp2 = json.dumps(stamp2, indent=4) + + assert dev_worker.logs().count(stamp) + assert dev_worker.logs().count(stamp1) == 0 + assert dev_worker.logs().count(stamp2) == 0 + + assert legacy_worker.logs().count(stamp) == 0 + assert legacy_worker.logs().count(stamp1) + assert legacy_worker.logs().count(stamp2) diff --git a/t/smoke/tests/stamping/test_revoke.py b/t/smoke/tests/stamping/test_revoke.py new file mode 100644 index 00000000000..3ec1dcbadcd --- /dev/null +++ b/t/smoke/tests/stamping/test_revoke.py @@ -0,0 +1,75 @@ +from __future__ import annotations + +import pytest +from pytest_celery import CeleryBackendCluster, CeleryTestWorker, CeleryWorkerCluster + +from celery.canvas import Signature, chain +from celery.result import AsyncResult +from t.integration.tasks import StampOnReplace, identity +from t.smoke.tests.stamping.tasks import wait_for_revoke + + +@pytest.fixture +def celery_worker_cluster( + celery_worker: CeleryTestWorker, + celery_latest_worker: CeleryTestWorker, +) -> CeleryWorkerCluster: + cluster = CeleryWorkerCluster(celery_worker, celery_latest_worker) + yield cluster + cluster.teardown() + + +@pytest.fixture +def celery_backend_cluster() -> CeleryBackendCluster: + # Disable backend + return None + + +@pytest.fixture +def 
wait_for_revoke_timeout() -> int: + return 4 + + +@pytest.fixture +def canvas( + dev_worker: CeleryTestWorker, + wait_for_revoke_timeout: int, +) -> Signature: + return chain( + identity.s(wait_for_revoke_timeout), + wait_for_revoke.s(waitfor_worker_queue=dev_worker.worker_queue).set( + queue=dev_worker.worker_queue + ), + ) + + +class test_revoke_by_stamped_headers: + def test_revoke_by_stamped_headers_after_publish( + self, + dev_worker: CeleryTestWorker, + celery_latest_worker: CeleryTestWorker, + wait_for_revoke_timeout: int, + canvas: Signature, + ): + result: AsyncResult = canvas.apply_async( + queue=celery_latest_worker.worker_queue + ) + result.revoke_by_stamped_headers(StampOnReplace.stamp, terminate=True) + dev_worker.assert_log_does_not_exist( + "Done waiting", + timeout=wait_for_revoke_timeout, + ) + + def test_revoke_by_stamped_headers_before_publish( + self, + dev_worker: CeleryTestWorker, + celery_latest_worker: CeleryTestWorker, + canvas: Signature, + ): + dev_worker.app.control.revoke_by_stamped_headers( + StampOnReplace.stamp, + terminate=True, + ) + canvas.apply_async(queue=celery_latest_worker.worker_queue) + dev_worker.assert_log_exists("Discarding revoked task") + dev_worker.assert_log_exists(f"revoked by header: {StampOnReplace.stamp}") diff --git a/t/smoke/tests/stamping/test_stamping.py b/t/smoke/tests/stamping/test_stamping.py deleted file mode 100644 index fd10da44939..00000000000 --- a/t/smoke/tests/stamping/test_stamping.py +++ /dev/null @@ -1,263 +0,0 @@ -from __future__ import annotations - -import json - -import pytest -from pytest_celery import (RESULT_TIMEOUT, CeleryBackendCluster, CeleryTestSetup, CeleryTestWorker, - CeleryWorkerCluster) - -from celery.canvas import Signature, StampingVisitor, chain -from celery.result import AsyncResult -from t.integration.tasks import StampOnReplace, add, identity, replace_with_stamped_task -from t.smoke.tests.stamping.tasks import wait_for_revoke -from t.smoke.workers.dev import SmokeWorkerContainer -from t.smoke.workers.legacy import CeleryLegacyWorkerContainer - - -@pytest.fixture -def dev_worker(celery_setup: CeleryTestSetup) -> CeleryTestWorker: - worker: CeleryTestWorker - for worker in celery_setup.worker_cluster: - if worker.version == SmokeWorkerContainer.version(): - return worker - return None - - -@pytest.fixture -def legacy_worker(celery_setup: CeleryTestSetup) -> CeleryTestWorker: - worker: CeleryTestWorker - for worker in celery_setup.worker_cluster: - if worker.version == CeleryLegacyWorkerContainer.version(): - return worker - return None - - -class test_stamping: - def test_callback(self, dev_worker: CeleryTestWorker): - on_signature_stamp = {"on_signature_stamp": 4} - no_visitor_stamp = {"no_visitor_stamp": "Stamp without visitor"} - on_callback_stamp = {"on_callback_stamp": 2} - link_stamp = { - **on_signature_stamp, - **no_visitor_stamp, - **on_callback_stamp, - } - - class CustomStampingVisitor(StampingVisitor): - def on_signature(self, sig, **headers) -> dict: - return on_signature_stamp.copy() - - def on_callback(self, callback, **header) -> dict: - return on_callback_stamp.copy() - - stamped_task = identity.si(123).set(queue=dev_worker.worker_queue) - stamped_task.link( - add.s(0) - .stamp(no_visitor_stamp=no_visitor_stamp["no_visitor_stamp"]) - .set(queue=dev_worker.worker_queue) - ) - stamped_task.stamp(visitor=CustomStampingVisitor()) - stamped_task.delay().get(timeout=RESULT_TIMEOUT) - assert dev_worker.logs().count( - json.dumps(on_signature_stamp, indent=4, sort_keys=True) - ) - assert 
dev_worker.logs().count(json.dumps(link_stamp, indent=4, sort_keys=True)) - - -class test_stamping_hybrid_worker_cluster: - @pytest.fixture( - # Each param item is a list of workers to be used in the cluster - # and each cluster will be tested separately (with parallel support) - params=[ - ["celery_setup_worker"], - ["celery_legacy_worker"], - ["celery_setup_worker", "celery_legacy_worker"], - ["celery_setup_worker", "celery_latest_worker", "celery_legacy_worker"], - ] - ) - def celery_worker_cluster( - self, - request: pytest.FixtureRequest, - ) -> CeleryWorkerCluster: - nodes: tuple[CeleryTestWorker] = [ - request.getfixturevalue(worker) for worker in request.param - ] - cluster = CeleryWorkerCluster(*nodes) - yield cluster - cluster.teardown() - - def test_sanity(self, celery_setup: CeleryTestSetup): - stamp = {"stamp": 42} - - class CustomStampingVisitor(StampingVisitor): - def on_signature(self, sig, **headers) -> dict: - return stamp.copy() - - worker: CeleryTestWorker - for worker in celery_setup.worker_cluster: - queue = worker.worker_queue - stamped_task = identity.si(123) - stamped_task.stamp(visitor=CustomStampingVisitor()) - assert stamped_task.apply_async(queue=queue).get(timeout=RESULT_TIMEOUT) - assert worker.logs().count(json.dumps(stamp, indent=4, sort_keys=True)) - - def test_sanity_worker_hop(self, celery_setup: CeleryTestSetup): - if len(celery_setup.worker_cluster) < 2: - pytest.skip("Not enough workers in cluster") - - stamp = {"stamp": 42} - - class CustomStampingVisitor(StampingVisitor): - def on_signature(self, sig, **headers) -> dict: - return stamp.copy() - - w1: CeleryTestWorker = celery_setup.worker_cluster[0] - w2: CeleryTestWorker = celery_setup.worker_cluster[1] - stamped_task = chain( - identity.si(4).set(queue=w1.worker_queue), - identity.si(2).set(queue=w2.worker_queue), - ) - stamped_task.stamp(visitor=CustomStampingVisitor()) - stamped_task.apply_async().get(timeout=RESULT_TIMEOUT) - - stamp = json.dumps(stamp, indent=4) - worker: CeleryTestWorker - for worker in (w1, w2): - assert worker.logs().count(stamp) - - def test_multiple_stamps_multiple_workers(self, celery_setup: CeleryTestSetup): - if len(celery_setup.worker_cluster) < 2: - pytest.skip("Not enough workers in cluster") - - stamp = {"stamp": 420} - stamp1 = {**stamp, "stamp1": 4} - stamp2 = {**stamp, "stamp2": 2} - - w1: CeleryTestWorker = celery_setup.worker_cluster[0] - w2: CeleryTestWorker = celery_setup.worker_cluster[1] - stamped_task = chain( - identity.si(4).set(queue=w1.worker_queue).stamp(stamp1=stamp1["stamp1"]), - identity.si(2).set(queue=w2.worker_queue).stamp(stamp2=stamp2["stamp2"]), - ) - stamped_task.stamp(stamp=stamp["stamp"]) - stamped_task.apply_async().get(timeout=RESULT_TIMEOUT) - - stamp1 = json.dumps(stamp1, indent=4) - stamp2 = json.dumps(stamp2, indent=4) - - assert w1.logs().count(stamp1) - assert w1.logs().count(stamp2) == 0 - - assert w2.logs().count(stamp1) == 0 - assert w2.logs().count(stamp2) - - def test_stamping_on_replace_with_legacy_worker_in_cluster( - self, - celery_setup: CeleryTestSetup, - dev_worker: CeleryTestWorker, - legacy_worker: CeleryTestWorker, - ): - if len(celery_setup.worker_cluster) < 2: - pytest.skip("Not enough workers in cluster") - - stamp = {"stamp": "Only for dev worker tasks"} - stamp1 = {**StampOnReplace.stamp, "stamp1": "1) Only for legacy worker tasks"} - stamp2 = {**StampOnReplace.stamp, "stamp2": "2) Only for legacy worker tasks"} - - replaced_sig1 = ( - identity.si(4) - .set(queue=legacy_worker.worker_queue) - 
.stamp(stamp1=stamp1["stamp1"]) - ) - replaced_sig2 = ( - identity.si(2) - .set(queue=legacy_worker.worker_queue) - .stamp(stamp2=stamp2["stamp2"]) - ) - - stamped_task = chain( - replace_with_stamped_task.si(replace_with=replaced_sig1).set( - queue=dev_worker.worker_queue - ), - replace_with_stamped_task.si(replace_with=replaced_sig2).set( - queue=dev_worker.worker_queue - ), - ) - stamped_task.stamp(stamp=stamp["stamp"]) - stamped_task.apply_async().get(timeout=RESULT_TIMEOUT) - - stamp = json.dumps(stamp, indent=4) - stamp1 = json.dumps(stamp1, indent=4) - stamp2 = json.dumps(stamp2, indent=4) - - assert dev_worker.logs().count(stamp) - assert dev_worker.logs().count(stamp1) == 0 - assert dev_worker.logs().count(stamp2) == 0 - - assert legacy_worker.logs().count(stamp) == 0 - assert legacy_worker.logs().count(stamp1) - assert legacy_worker.logs().count(stamp2) - - -class test_revoke_by_stamped_headers: - @pytest.fixture - def celery_worker_cluster( - self, - celery_worker: CeleryTestWorker, - celery_latest_worker: CeleryTestWorker, - ) -> CeleryWorkerCluster: - cluster = CeleryWorkerCluster(celery_worker, celery_latest_worker) - yield cluster - cluster.teardown() - - @pytest.fixture - def celery_backend_cluster(self) -> CeleryBackendCluster: - # Disable backend - return None - - @pytest.fixture - def wait_for_revoke_timeout(self) -> int: - return 4 - - @pytest.fixture - def canvas( - self, - dev_worker: CeleryTestWorker, - wait_for_revoke_timeout: int, - ) -> Signature: - return chain( - identity.s(wait_for_revoke_timeout), - wait_for_revoke.s(waitfor_worker_queue=dev_worker.worker_queue).set( - queue=dev_worker.worker_queue - ), - ) - - def test_revoke_by_stamped_headers_after_publish( - self, - dev_worker: CeleryTestWorker, - celery_latest_worker: CeleryTestWorker, - wait_for_revoke_timeout: int, - canvas: Signature, - ): - result: AsyncResult = canvas.apply_async( - queue=celery_latest_worker.worker_queue - ) - result.revoke_by_stamped_headers(StampOnReplace.stamp, terminate=True) - dev_worker.assert_log_does_not_exist( - "Done waiting", - timeout=wait_for_revoke_timeout, - ) - - def test_revoke_by_stamped_headers_before_publish( - self, - dev_worker: CeleryTestWorker, - celery_latest_worker: CeleryTestWorker, - canvas: Signature, - ): - dev_worker.app.control.revoke_by_stamped_headers( - StampOnReplace.stamp, - terminate=True, - ) - canvas.apply_async(queue=celery_latest_worker.worker_queue) - dev_worker.assert_log_exists("Discarding revoked task") - dev_worker.assert_log_exists(f"revoked by header: {StampOnReplace.stamp}") diff --git a/t/smoke/tests/stamping/test_visitor.py b/t/smoke/tests/stamping/test_visitor.py new file mode 100644 index 00000000000..c64991f35d5 --- /dev/null +++ b/t/smoke/tests/stamping/test_visitor.py @@ -0,0 +1,40 @@ +from __future__ import annotations + +import json + +from pytest_celery import RESULT_TIMEOUT, CeleryTestWorker + +from celery.canvas import StampingVisitor +from t.integration.tasks import add, identity + + +class test_stamping_visitor: + def test_callback(self, dev_worker: CeleryTestWorker): + on_signature_stamp = {"on_signature_stamp": 4} + no_visitor_stamp = {"no_visitor_stamp": "Stamp without visitor"} + on_callback_stamp = {"on_callback_stamp": 2} + link_stamp = { + **on_signature_stamp, + **no_visitor_stamp, + **on_callback_stamp, + } + + class CustomStampingVisitor(StampingVisitor): + def on_signature(self, sig, **headers) -> dict: + return on_signature_stamp.copy() + + def on_callback(self, callback, **header) -> dict: + return 
on_callback_stamp.copy() + + stamped_task = identity.si(123).set(queue=dev_worker.worker_queue) + stamped_task.link( + add.s(0) + .stamp(no_visitor_stamp=no_visitor_stamp["no_visitor_stamp"]) + .set(queue=dev_worker.worker_queue) + ) + stamped_task.stamp(visitor=CustomStampingVisitor()) + stamped_task.delay().get(timeout=RESULT_TIMEOUT) + assert dev_worker.logs().count( + json.dumps(on_signature_stamp, indent=4, sort_keys=True) + ) + assert dev_worker.logs().count(json.dumps(link_stamp, indent=4, sort_keys=True)) diff --git a/t/smoke/workers/legacy.py b/t/smoke/tests/stamping/workers/legacy.py similarity index 80% rename from t/smoke/workers/legacy.py rename to t/smoke/tests/stamping/workers/legacy.py index 42a3952d575..385c7c5762b 100644 --- a/t/smoke/workers/legacy.py +++ b/t/smoke/tests/stamping/workers/legacy.py @@ -7,7 +7,7 @@ from celery import Celery -class CeleryLegacyWorkerContainer(CeleryWorkerContainer): +class LegacyWorkerContainer(CeleryWorkerContainer): @property def client(self) -> Any: return self @@ -22,18 +22,18 @@ def log_level(cls) -> str: @classmethod def worker_name(cls) -> str: - return "celery4_tests_worker" + return "celery_legacy_tests_worker" @classmethod def worker_queue(cls) -> str: - return "celery4_tests_queue" + return "celery_legacy_tests_queue" celery_legacy_worker_image = build( path=".", dockerfile="t/smoke/workers/docker/pypi", tag="t/smoke/worker:legacy", - buildargs=CeleryLegacyWorkerContainer.buildargs(), + buildargs=LegacyWorkerContainer.buildargs(), ) @@ -42,14 +42,14 @@ def worker_queue(cls) -> str: environment=fxtr("default_worker_env"), network="{default_pytest_celery_network.name}", volumes={"{default_worker_volume.name}": defaults.DEFAULT_WORKER_VOLUME}, - wrapper_class=CeleryLegacyWorkerContainer, + wrapper_class=LegacyWorkerContainer, timeout=defaults.DEFAULT_WORKER_CONTAINER_TIMEOUT, ) @pytest.fixture def celery_legacy_worker( - celery_legacy_worker_container: CeleryLegacyWorkerContainer, + celery_legacy_worker_container: LegacyWorkerContainer, celery_setup_app: Celery, ) -> CeleryTestWorker: worker = CeleryTestWorker(celery_legacy_worker_container, app=celery_setup_app) diff --git a/t/smoke/tests/test_tasks.py b/t/smoke/tests/test_tasks.py index 289a537da9b..162db9bfc70 100644 --- a/t/smoke/tests/test_tasks.py +++ b/t/smoke/tests/test_tasks.py @@ -11,9 +11,9 @@ class test_replace: def celery_worker_cluster( self, celery_worker: CeleryTestWorker, - celery_latest_worker: CeleryTestWorker, + celery_other_dev_worker: CeleryTestWorker, ) -> CeleryWorkerCluster: - cluster = CeleryWorkerCluster(celery_worker, celery_latest_worker) + cluster = CeleryWorkerCluster(celery_worker, celery_other_dev_worker) yield cluster cluster.teardown() diff --git a/t/smoke/workers/alt.py b/t/smoke/workers/alt.py index b333f2616e3..63dbd673d67 100644 --- a/t/smoke/workers/alt.py +++ b/t/smoke/workers/alt.py @@ -1,16 +1,31 @@ +from __future__ import annotations + import os import pytest from pytest_celery import CeleryTestWorker, defaults -from pytest_docker_tools import container, fxtr +from pytest_docker_tools import build, container, fxtr from celery import Celery from t.smoke.workers.dev import SmokeWorkerContainer -# Allows having two different workers with the same queue and settings -# that are based on the current codebase + +class AltSmokeWorkerContainer(SmokeWorkerContainer): + @classmethod + def worker_name(cls) -> str: + return "alt_smoke_tests_worker" + + +celery_alt_dev_worker_image = build( + path=".", + dockerfile="t/smoke/workers/docker/dev", + 
tag="t/smoke/worker:alt", + buildargs=AltSmokeWorkerContainer.buildargs(), +) + + alt_dev_worker_container = container( - image="{celery_dev_worker_image.id}", + image="{celery_alt_dev_worker_image.id}", environment=fxtr("default_worker_env"), network="{default_pytest_celery_network.name}", volumes={ @@ -22,14 +37,14 @@ "mode": "rw", }, }, - wrapper_class=SmokeWorkerContainer, + wrapper_class=AltSmokeWorkerContainer, timeout=defaults.DEFAULT_WORKER_CONTAINER_TIMEOUT, ) @pytest.fixture def celery_alt_dev_worker( - alt_dev_worker_container: SmokeWorkerContainer, + alt_dev_worker_container: AltSmokeWorkerContainer, celery_setup_app: Celery, ) -> CeleryTestWorker: worker = CeleryTestWorker(alt_dev_worker_container, app=celery_setup_app) diff --git a/t/smoke/workers/other.py b/t/smoke/workers/other.py new file mode 100644 index 00000000000..28a24cb38c0 --- /dev/null +++ b/t/smoke/workers/other.py @@ -0,0 +1,56 @@ +from __future__ import annotations + +import os + +import pytest +from pytest_celery import CeleryTestWorker, defaults +from pytest_docker_tools import build, container, fxtr + +from celery import Celery +from t.smoke.workers.dev import SmokeWorkerContainer + + +class OtherSmokeWorkerContainer(SmokeWorkerContainer): + @classmethod + def worker_name(cls) -> str: + return "other_smoke_tests_worker" + + @classmethod + def worker_queue(cls) -> str: + return "other_smoke_tests_queue" + + +celery_other_dev_worker_image = build( + path=".", + dockerfile="t/smoke/workers/docker/dev", + tag="t/smoke/worker:other", + buildargs=OtherSmokeWorkerContainer.buildargs(), +) + + +other_dev_worker_container = container( + image="{celery_other_dev_worker_image.id}", + environment=fxtr("default_worker_env"), + network="{default_pytest_celery_network.name}", + volumes={ + # Volume: Worker /app + "{default_worker_volume.name}": defaults.DEFAULT_WORKER_VOLUME, + # Mount: Celery source + os.path.abspath(os.getcwd()): { + "bind": "/celery", + "mode": "rw", + }, + }, + wrapper_class=OtherSmokeWorkerContainer, + timeout=defaults.DEFAULT_WORKER_CONTAINER_TIMEOUT, +) + + +@pytest.fixture +def celery_other_dev_worker( + other_dev_worker_container: OtherSmokeWorkerContainer, + celery_setup_app: Celery, +) -> CeleryTestWorker: + worker = CeleryTestWorker(other_dev_worker_container, app=celery_setup_app) + yield worker + worker.teardown() From f1b367b83c594414d7883ca3255ad64debf302c3 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 11 Dec 2023 22:15:38 +0200 Subject: [PATCH 0613/1051] Refactored worker smoke tests utilities (#8712) * Fixed imports in smoke tests * Refactored WorkerOperations in smoke tests * Use dataclass for worker termination operation options instead of plain dict * Using get(timeout=RESULT_TIMEOUT) * Reload worker container obj after termination/restart * Added cleanup to suicide_exhaust_hdd() * Reverted "Run all tests in CI together (to be reverted)" * Run smoke tests CI only after integration tests (finally) * --reruns-delay 10 -> 60 for smoke tests * BaseException -> Exception * Disabled Redis Broker in Smoke Tests - Redis Broker feature is too unstable * Improved stability of smoke tests * Configure back Redis Broker for smoke tests * Cleanup and renaming * Added TODO * t/smoke --reruns 10 --reruns-delay 60 --rerun-except AssertionError * Renamed WorkerOperations -> SuiteOperations * Refactored SuiteOperations code into separated modules --- .github/workflows/python-package.yml | 194 +++++++++--------- t/smoke/conftest.py | 11 + t/smoke/operations/__init__.py | 0 
t/smoke/operations/task_termination.py | 78 +++++++ t/smoke/operations/worker_kill.py | 33 +++ t/smoke/operations/worker_restart.py | 34 +++ t/smoke/tasks.py | 136 ++++++++++-- t/smoke/tests/__init__.py | 0 t/smoke/tests/conftest.py | 63 ------ t/smoke/tests/failover/__init__.py | 0 .../tests/failover/test_broker_failover.py | 2 +- .../tests/failover/test_worker_failover.py | 43 ++-- t/smoke/tests/stamping/workers/__init__.py | 0 t/smoke/tests/test_canvas.py | 2 +- t/smoke/tests/test_control.py | 10 +- t/smoke/tests/test_worker.py | 21 +- tox.ini | 2 +- 17 files changed, 414 insertions(+), 215 deletions(-) create mode 100644 t/smoke/operations/__init__.py create mode 100644 t/smoke/operations/task_termination.py create mode 100644 t/smoke/operations/worker_kill.py create mode 100644 t/smoke/operations/worker_restart.py create mode 100644 t/smoke/tests/__init__.py delete mode 100644 t/smoke/tests/conftest.py create mode 100644 t/smoke/tests/failover/__init__.py create mode 100644 t/smoke/tests/stamping/workers/__init__.py diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index d68297ea641..1dd4d7a2b92 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -23,102 +23,102 @@ permissions: contents: read # to fetch code (actions/checkout) jobs: - Unit: - - runs-on: ${{ matrix.os }} - strategy: - fail-fast: false - matrix: - python-version: ['3.8', '3.9', '3.10', '3.11', '3.12', 'pypy-3.10'] - os: ["ubuntu-latest", "windows-latest"] - exclude: - - python-version: '3.9' - os: "windows-latest" - - python-version: 'pypy-3.10' - os: "windows-latest" - - python-version: '3.10' - os: "windows-latest" - - python-version: '3.11' - os: "windows-latest" - - steps: - - name: Install apt packages - if: startsWith(matrix.os, 'ubuntu-') - run: | - sudo apt-get update && sudo apt-get install -f libcurl4-openssl-dev libssl-dev libgnutls28-dev httping expect libmemcached-dev - - uses: actions/checkout@v4 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - cache: 'pip' - cache-dependency-path: '**/setup.py' - - - name: Install tox - run: python -m pip install --upgrade pip 'tox' tox-gh-actions - - name: > - Run tox for - "${{ matrix.python-version }}-unit" - timeout-minutes: 30 - run: | - tox --verbose --verbose - - - uses: codecov/codecov-action@v3 - with: - flags: unittests # optional - fail_ci_if_error: true # optional (default = false) - verbose: true # optional (default = false) - - Integration: - # needs: - # - Unit - # if: needs.Unit.result == 'success' - # timeout-minutes: 240 - - runs-on: ubuntu-latest - strategy: - fail-fast: false - matrix: - python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] - toxenv: ['redis', 'rabbitmq', 'rabbitmq_redis'] - - services: - redis: - image: redis - ports: - - 6379:6379 - env: - REDIS_HOST: localhost - REDIS_PORT: 6379 - rabbitmq: - image: rabbitmq - ports: - - 5672:5672 - env: - RABBITMQ_DEFAULT_USER: guest - RABBITMQ_DEFAULT_PASS: guest - - steps: - - name: Install apt packages - run: | - sudo apt-get update && sudo apt-get install -f libcurl4-openssl-dev libssl-dev libgnutls28-dev httping expect libmemcached-dev - - - uses: actions/checkout@v4 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - cache: 'pip' - cache-dependency-path: '**/setup.py' - - name: Install tox - run: python -m pip install --upgrade 
pip 'tox' tox-gh-actions - - name: > - Run tox for - "${{ matrix.python-version }}-integration-${{ matrix.toxenv }}" - timeout-minutes: 60 - run: > - tox --verbose --verbose -e - "${{ matrix.python-version }}-integration-${{ matrix.toxenv }}" -vv + # Unit: + + # runs-on: ${{ matrix.os }} + # strategy: + # fail-fast: false + # matrix: + # python-version: ['3.8', '3.9', '3.10', '3.11', '3.12', 'pypy-3.10'] + # os: ["ubuntu-latest", "windows-latest"] + # exclude: + # - python-version: '3.9' + # os: "windows-latest" + # - python-version: 'pypy-3.10' + # os: "windows-latest" + # - python-version: '3.10' + # os: "windows-latest" + # - python-version: '3.11' + # os: "windows-latest" + + # steps: + # - name: Install apt packages + # if: startsWith(matrix.os, 'ubuntu-') + # run: | + # sudo apt-get update && sudo apt-get install -f libcurl4-openssl-dev libssl-dev libgnutls28-dev httping expect libmemcached-dev + # - uses: actions/checkout@v4 + # - name: Set up Python ${{ matrix.python-version }} + # uses: actions/setup-python@v5 + # with: + # python-version: ${{ matrix.python-version }} + # cache: 'pip' + # cache-dependency-path: '**/setup.py' + + # - name: Install tox + # run: python -m pip install --upgrade pip 'tox' tox-gh-actions + # - name: > + # Run tox for + # "${{ matrix.python-version }}-unit" + # timeout-minutes: 30 + # run: | + # tox --verbose --verbose + + # - uses: codecov/codecov-action@v3 + # with: + # flags: unittests # optional + # fail_ci_if_error: true # optional (default = false) + # verbose: true # optional (default = false) + + # Integration: + # needs: + # - Unit + # if: needs.Unit.result == 'success' + # timeout-minutes: 240 + + # runs-on: ubuntu-latest + # strategy: + # fail-fast: false + # matrix: + # python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + # toxenv: ['redis', 'rabbitmq', 'rabbitmq_redis'] + + # services: + # redis: + # image: redis + # ports: + # - 6379:6379 + # env: + # REDIS_HOST: localhost + # REDIS_PORT: 6379 + # rabbitmq: + # image: rabbitmq + # ports: + # - 5672:5672 + # env: + # RABBITMQ_DEFAULT_USER: guest + # RABBITMQ_DEFAULT_PASS: guest + + # steps: + # - name: Install apt packages + # run: | + # sudo apt-get update && sudo apt-get install -f libcurl4-openssl-dev libssl-dev libgnutls28-dev httping expect libmemcached-dev + + # - uses: actions/checkout@v4 + # - name: Set up Python ${{ matrix.python-version }} + # uses: actions/setup-python@v5 + # with: + # python-version: ${{ matrix.python-version }} + # cache: 'pip' + # cache-dependency-path: '**/setup.py' + # - name: Install tox + # run: python -m pip install --upgrade pip 'tox' tox-gh-actions + # - name: > + # Run tox for + # "${{ matrix.python-version }}-integration-${{ matrix.toxenv }}" + # timeout-minutes: 60 + # run: > + # tox --verbose --verbose -e + # "${{ matrix.python-version }}-integration-${{ matrix.toxenv }}" -vv Smoke: # needs: @@ -154,7 +154,7 @@ jobs: - name: > Run tox for "${{ matrix.python-version }}-smoke" - timeout-minutes: 30 + timeout-minutes: 60 run: > tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto diff --git a/t/smoke/conftest.py b/t/smoke/conftest.py index f7ed5436790..25687325dbd 100644 --- a/t/smoke/conftest.py +++ b/t/smoke/conftest.py @@ -4,12 +4,23 @@ from pytest_celery import REDIS_CONTAINER_TIMEOUT, REDIS_ENV, REDIS_IMAGE, REDIS_PORTS, RedisContainer from pytest_docker_tools import container, fetch, network +from t.smoke.operations.task_termination import TaskTermination +from t.smoke.operations.worker_kill import WorkerKill +from 
t.smoke.operations.worker_restart import WorkerRestart from t.smoke.workers.alt import * # noqa from t.smoke.workers.dev import * # noqa from t.smoke.workers.latest import * # noqa from t.smoke.workers.other import * # noqa +class SuiteOperations( + TaskTermination, + WorkerKill, + WorkerRestart, +): + pass + + @pytest.fixture def default_worker_tasks(default_worker_tasks: set) -> set: from t.integration import tasks as integration_tests_tasks diff --git a/t/smoke/operations/__init__.py b/t/smoke/operations/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/t/smoke/operations/task_termination.py b/t/smoke/operations/task_termination.py new file mode 100644 index 00000000000..d51f64da307 --- /dev/null +++ b/t/smoke/operations/task_termination.py @@ -0,0 +1,78 @@ +from __future__ import annotations + +from dataclasses import dataclass +from enum import Enum, auto + +from pytest_celery import CeleryTestWorker + +from celery.exceptions import TimeLimitExceeded, WorkerLostError +from t.smoke.tasks import suicide + + +class TaskTermination: + class Method(Enum): + DELAY_TIMEOUT = auto() + CPU_OVERLOAD = auto() + EXCEPTION = auto() + SYSTEM_EXIT = auto() + ALLOCATE_MAX_MEMORY = auto() + EXHAUST_MEMORY = auto() + EXHAUST_HDD = auto() + CONTROL_SHUTDOWN = auto() + SIGKILL = auto() + + @dataclass + class Options: + worker: CeleryTestWorker + method: str + allocate: int + large_file_name: str + hostname: str + try_eager: bool = True + time_limit: int = 4 + cpu_load_factor: int = 420 + + def run_suicide_task( + self, + worker: CeleryTestWorker, + method: TaskTermination.Method, + **options: dict, + ): + # Update kwargs with default values for missing keys + defaults = { + "worker": worker, + "method": method.name, + "allocate": worker.app.conf.worker_max_memory_per_child * 10**9, + "large_file_name": worker.name(), + "hostname": worker.hostname(), + } + options = {**defaults, **options} + options = TaskTermination.Options(**options) + + expected_error = { + TaskTermination.Method.DELAY_TIMEOUT: TimeLimitExceeded, + TaskTermination.Method.CPU_OVERLOAD: RecursionError, + TaskTermination.Method.EXCEPTION: Exception, + TaskTermination.Method.SYSTEM_EXIT: WorkerLostError, + TaskTermination.Method.ALLOCATE_MAX_MEMORY: MemoryError, + TaskTermination.Method.EXHAUST_MEMORY: WorkerLostError, + TaskTermination.Method.EXHAUST_HDD: OSError, + TaskTermination.Method.SIGKILL: WorkerLostError, + }.get(method) + + try: + suicide(**options.__dict__) + except Exception as e: + if expected_error is None: + # No specific error expected, this is an unexpected exception + assert ( + False + ), f"Worker termination by '{method.name}' failed due to an unexpected error: {e}" + + if not isinstance(e, expected_error): + # Specific error expected but an unexpected type of error occurred + assert ( + False + ), f"Worker termination by '{method.name}' failed due to a different error: {e}" + finally: + worker.container.reload() diff --git a/t/smoke/operations/worker_kill.py b/t/smoke/operations/worker_kill.py new file mode 100644 index 00000000000..6a4af26b383 --- /dev/null +++ b/t/smoke/operations/worker_kill.py @@ -0,0 +1,33 @@ +from __future__ import annotations + +from enum import Enum, auto + +from pytest_celery import CeleryTestWorker + +from celery.app.control import Control + + +class WorkerKill: + class Method(Enum): + DOCKER_KILL = auto() + CONTROL_SHUTDOWN = auto() + + def kill_worker( + self, + worker: CeleryTestWorker, + method: WorkerKill.Method, + assertion: bool = True, + ): + if method == 
WorkerKill.Method.DOCKER_KILL: + worker.kill() + + if method == WorkerKill.Method.CONTROL_SHUTDOWN: + control: Control = worker.app.control + control.shutdown(destination=[worker.hostname()]) + worker.container.reload() + + if assertion: + assert worker.container.status == "exited", ( + f"Worker container should be in 'exited' state after kill, " + f"but is in '{worker.container.status}' state instead." + ) diff --git a/t/smoke/operations/worker_restart.py b/t/smoke/operations/worker_restart.py new file mode 100644 index 00000000000..58d87c9def0 --- /dev/null +++ b/t/smoke/operations/worker_restart.py @@ -0,0 +1,34 @@ +from __future__ import annotations + +from enum import Enum, auto + +from pytest_celery import CeleryTestWorker + + +class WorkerRestart: + class Method(Enum): + POOL_RESTART = auto() + DOCKER_RESTART_GRACEFULLY = auto() + DOCKER_RESTART_FORCE = auto() + + def restart_worker( + self, + worker: CeleryTestWorker, + method: WorkerRestart.Method, + assertion: bool = True, + ): + if method == WorkerRestart.Method.POOL_RESTART: + worker.app.control.pool_restart() + worker.container.reload() + + if method == WorkerRestart.Method.DOCKER_RESTART_GRACEFULLY: + worker.restart() + + if method == WorkerRestart.Method.DOCKER_RESTART_FORCE: + worker.restart(force=True) + + if assertion: + assert worker.container.status == "running", ( + f"Worker container should be in 'running' state after restart, " + f"but is in '{worker.container.status}' state instead." + ) diff --git a/t/smoke/tasks.py b/t/smoke/tasks.py index d7b3f929461..549cfb0406a 100644 --- a/t/smoke/tasks.py +++ b/t/smoke/tasks.py @@ -1,10 +1,15 @@ from __future__ import annotations +import math +import os +import sys +from signal import SIGKILL from sys import getsizeof from time import sleep import celery.utils from celery import Task, shared_task, signature +from celery.app.control import Control from celery.canvas import Signature from t.integration.tasks import * # noqa from t.integration.tasks import replaced_with_me @@ -16,12 +21,7 @@ def noop(*args, **kwargs) -> None: @shared_task -def long_running_task( - seconds: float = 1, - verbose: bool = False, - allocate: int | None = None, - exhaust_memory: bool = False, -) -> bool: +def long_running_task(seconds: float = 1, verbose: bool = False) -> bool: from celery import current_task from celery.utils.log import get_task_logger @@ -29,15 +29,6 @@ def long_running_task( logger.info("Starting long running task") - if allocate: - # Attempt to allocate megabytes in memory - _ = [0] * (allocate * 10**6 // getsizeof(int())) - - if exhaust_memory: - mem = [] - while True: - mem.append(' ' * 10**6) # 1 MB of spaces - for i in range(0, int(seconds)): sleep(1) if verbose: @@ -53,3 +44,118 @@ def replace_with_task(self: Task, replace_with: Signature = None): if replace_with is None: replace_with = replaced_with_me.s() return self.replace(signature(replace_with)) + + +@shared_task +def suicide(method: str, try_eager: bool = True, **options: dict): + termination_method = { + "DELAY_TIMEOUT": suicide_delay_timeout.si( + time_limit=options["time_limit"], + ), + "CPU_OVERLOAD": suicide_cpu_overload.si( + cpu_load_factor=options["cpu_load_factor"] + ), + "EXCEPTION": suicide_exception.si(), + "SYSTEM_EXIT": suicide_system_exit.si(), + "ALLOCATE_MAX_MEMORY": suicide_allocate_max_memory.si( + allocate=options["allocate"] + ), + "EXHAUST_MEMORY": suicide_exhaust_memory.si(), + "EXHAUST_HDD": suicide_exhaust_hdd.si( + large_file_name=options["large_file_name"] + ), + "CONTROL_SHUTDOWN": 
suicide_control_shutdown.si( + hostname=options["hostname"], + ), + "SIGKILL": suicide_sigkill.si(), + } + + sig = termination_method.get(method) + if sig: + if try_eager and method in { + "CONTROL_SHUTDOWN", + }: + return sig.apply().get() + + worker = options["worker"] + return sig.apply_async(queue=worker.worker_queue).get() + else: + raise ValueError(f"Unsupported termination method: {method}") + + +@shared_task(time_limit=2) +def suicide_delay_timeout(time_limit: int = 4): + """Delays the execution to simulate a task timeout.""" + sleep(time_limit) + + +@shared_task +def suicide_cpu_overload(cpu_load_factor: int = 420): + """Performs CPU-intensive operations to simulate a CPU overload.""" + + def cpu_intensive_calculation(n): + return cpu_intensive_calculation(math.sin(n)) + + cpu_intensive_calculation(cpu_load_factor) + + +@shared_task +def suicide_exception(): + """Raises an exception to simulate an unexpected error during task execution.""" + raise Exception("Simulated task failure due to an exception.") + + +@shared_task +def suicide_system_exit(): + """Triggers a system exit to simulate a critical stop of the Celery worker.""" + sys.exit("Simulated Celery worker stop via system exit.") + + +@shared_task +def suicide_allocate_max_memory(allocate: int): + """Allocates the maximum amount of memory permitted, potentially leading to memory errors.""" + _ = [0] * (allocate // getsizeof(int())) + + +@shared_task +def suicide_exhaust_memory(): + """Continuously allocates memory to simulate memory exhaustion.""" + mem = [] + while True: + mem.append(" " * 10**6) + + +@shared_task +def suicide_exhaust_hdd(large_file_name: str = "large_file"): + """Consumes disk space in /tmp to simulate a scenario where the disk is getting full.""" + # file_path = f"/tmp/{large_file_name}.tmp" + # try: + # with open(file_path, "wb") as f: + # chunk = b"\0" * 42 * 1024**2 # 42 MB + # while True: + # f.write(chunk) + # finally: + # if os.path.exists(file_path): + # os.remove(file_path) + + # This code breaks GitHub CI so we simulate the same error as best effort + ######################################################################### + # [error]Failed to create step summary using 'GITHUB_STEP_SUMMARY': No space left on device + # [error]No space left on device + raise OSError("No space left on device") + + +@shared_task +def suicide_control_shutdown(hostname: str): + """Initiates a controlled shutdown via the Control API.""" + from celery.app.base import get_current_app + + app = get_current_app() + control: Control = app.control + control.shutdown(destination=[hostname]) + + +@shared_task +def suicide_sigkill(): + """Forceful termination.""" + os.kill(os.getpid(), SIGKILL) diff --git a/t/smoke/tests/__init__.py b/t/smoke/tests/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/t/smoke/tests/conftest.py b/t/smoke/tests/conftest.py deleted file mode 100644 index 16f550c9167..00000000000 --- a/t/smoke/tests/conftest.py +++ /dev/null @@ -1,63 +0,0 @@ -from __future__ import annotations - -from enum import Enum, auto - -from billiard.exceptions import WorkerLostError -from pytest_celery import CeleryTestSetup, CeleryTestWorker - -from celery.app.control import Control -from t.smoke.tasks import long_running_task - - -class WorkerOperations: - class TerminationMethod(Enum): - SIGKILL = auto() - CONTROL_SHUTDOWN = auto() - MAX_MEMORY_ALLOCATED = auto() - MEMORY_LIMIT_EXCEEDED = auto() - - class RestartMethod(Enum): - POOL_RESTART = auto() - DOCKER_RESTART_GRACEFULLY = auto() - 
DOCKER_RESTART_FORCE = auto() - - def terminate(self, worker: CeleryTestWorker, method: TerminationMethod): - if method == WorkerOperations.TerminationMethod.SIGKILL: - worker.kill() - return - - if method == WorkerOperations.TerminationMethod.CONTROL_SHUTDOWN: - control: Control = worker.app.control - control.shutdown(destination=[worker.hostname()]) - return - - if method == WorkerOperations.TerminationMethod.MAX_MEMORY_ALLOCATED: - allocate = worker.app.conf.worker_max_memory_per_child * 10**6 - try: - ( - long_running_task.si(allocate=allocate) - .apply_async(queue=worker.worker_queue) - .get() - ) - except MemoryError: - return - - if method == WorkerOperations.TerminationMethod.MEMORY_LIMIT_EXCEEDED: - try: - ( - long_running_task.si(exhaust_memory=True) - .apply_async(queue=worker.worker_queue) - .get() - ) - except WorkerLostError: - return - - assert False - - def restart(self, celery_setup: CeleryTestSetup, method: RestartMethod): - if method == WorkerOperations.RestartMethod.POOL_RESTART: - celery_setup.app.control.pool_restart() - elif method == WorkerOperations.RestartMethod.DOCKER_RESTART_GRACEFULLY: - celery_setup.worker.restart() - elif method == WorkerOperations.RestartMethod.DOCKER_RESTART_FORCE: - celery_setup.worker.restart(force=True) diff --git a/t/smoke/tests/failover/__init__.py b/t/smoke/tests/failover/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/t/smoke/tests/failover/test_broker_failover.py b/t/smoke/tests/failover/test_broker_failover.py index bfcaa86a688..be41cdcce43 100644 --- a/t/smoke/tests/failover/test_broker_failover.py +++ b/t/smoke/tests/failover/test_broker_failover.py @@ -3,7 +3,7 @@ RabbitMQContainer, RabbitMQTestBroker) from pytest_docker_tools import container, fxtr -from t.smoke.tasks import identity +from t.integration.tasks import identity failover_broker = container( image="{default_rabbitmq_broker_image}", diff --git a/t/smoke/tests/failover/test_worker_failover.py b/t/smoke/tests/failover/test_worker_failover.py index b555054e38f..ae235168266 100644 --- a/t/smoke/tests/failover/test_worker_failover.py +++ b/t/smoke/tests/failover/test_worker_failover.py @@ -1,11 +1,13 @@ from __future__ import annotations import pytest -from pytest_celery import CeleryTestSetup, CeleryTestWorker, CeleryWorkerCluster, RedisTestBroker +from pytest_celery import RESULT_TIMEOUT, CeleryTestSetup, CeleryTestWorker, CeleryWorkerCluster, RedisTestBroker from celery import Celery +from t.smoke.conftest import SuiteOperations, WorkerKill from t.smoke.tasks import long_running_task -from t.smoke.tests.conftest import WorkerOperations + +MB = 1024 * 1024 @pytest.fixture @@ -18,50 +20,47 @@ def celery_worker_cluster( cluster.teardown() -@pytest.mark.parametrize( - "termination_method", - [ - WorkerOperations.TerminationMethod.SIGKILL, - WorkerOperations.TerminationMethod.CONTROL_SHUTDOWN, - WorkerOperations.TerminationMethod.MAX_MEMORY_ALLOCATED, - WorkerOperations.TerminationMethod.MEMORY_LIMIT_EXCEEDED, - ], -) -class test_worker_failover(WorkerOperations): +@pytest.mark.parametrize("method", [WorkerKill.Method.DOCKER_KILL]) +class test_worker_failover(SuiteOperations): @pytest.fixture def default_worker_app(self, default_worker_app: Celery) -> Celery: app = default_worker_app app.conf.task_acks_late = True - app.conf.worker_max_memory_per_child = 10 * 1024 # Limit to 10MB + app.conf.worker_max_memory_per_child = 10 * MB if app.conf.broker_url.startswith("redis"): + # Redis Broker optimization to speed up the tests 
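+            # visibility_timeout=1 makes the Redis transport redeliver an
+            # unacknowledged message after one second (the default is 3600s),
+            # so the failover tests observe redelivery quickly.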
app.conf.broker_transport_options = {"visibility_timeout": 1} yield app def test_killing_first_worker( self, celery_setup: CeleryTestSetup, - termination_method: WorkerOperations.TerminationMethod, + method: WorkerKill.Method, ): + assert len(celery_setup.worker_cluster) > 1 + queue = celery_setup.worker.worker_queue sig = long_running_task.si(1).set(queue=queue) res = sig.delay() - assert res.get(timeout=2) is True - self.terminate(celery_setup.worker, termination_method) + assert res.get(timeout=RESULT_TIMEOUT) is True + self.kill_worker(celery_setup.worker, method) sig = long_running_task.si(1).set(queue=queue) res = sig.delay() - assert res.get(timeout=2) is True + assert res.get(timeout=RESULT_TIMEOUT) is True def test_reconnect_to_restarted_worker( self, celery_setup: CeleryTestSetup, - termination_method: WorkerOperations.TerminationMethod, + method: WorkerKill.Method, ): + assert len(celery_setup.worker_cluster) > 1 + queue = celery_setup.worker.worker_queue sig = long_running_task.si(1).set(queue=queue) res = sig.delay() assert res.get(timeout=10) is True for worker in celery_setup.worker_cluster: - self.terminate(worker, termination_method) + self.kill_worker(worker, method) celery_setup.worker.restart() sig = long_running_task.si(1).set(queue=queue) res = sig.delay() @@ -70,8 +69,10 @@ def test_reconnect_to_restarted_worker( def test_task_retry_on_worker_crash( self, celery_setup: CeleryTestSetup, - termination_method: WorkerOperations.TerminationMethod, + method: WorkerKill, ): + assert len(celery_setup.worker_cluster) > 1 + if isinstance(celery_setup.broker, RedisTestBroker): pytest.xfail("Potential Bug: works with RabbitMQ, but not Redis") @@ -80,5 +81,5 @@ def test_task_retry_on_worker_crash( sig = long_running_task.si(sleep_time, verbose=True).set(queue=queue) res = sig.apply_async(retry=True, retry_policy={"max_retries": 1}) celery_setup.worker.wait_for_log("Sleeping: 2") # Let task run - self.terminate(celery_setup.worker, termination_method) + self.kill_worker(celery_setup.worker, method) assert res.get(timeout=10) is True diff --git a/t/smoke/tests/stamping/workers/__init__.py b/t/smoke/tests/stamping/workers/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/t/smoke/tests/test_canvas.py b/t/smoke/tests/test_canvas.py index 965ac5e3179..e25aaaffc28 100644 --- a/t/smoke/tests/test_canvas.py +++ b/t/smoke/tests/test_canvas.py @@ -2,7 +2,7 @@ from pytest_celery import RESULT_TIMEOUT, CeleryTestSetup from celery.canvas import chain, chord, group, signature -from t.smoke.tasks import add, identity +from t.integration.tasks import add, identity class test_signature: diff --git a/t/smoke/tests/test_control.py b/t/smoke/tests/test_control.py index edd108b36e7..5a911524186 100644 --- a/t/smoke/tests/test_control.py +++ b/t/smoke/tests/test_control.py @@ -4,10 +4,16 @@ class test_control: def test_sanity(self, celery_setup: CeleryTestSetup): r = celery_setup.app.control.ping() - assert all([all([res["ok"] == "pong" for _, res in response.items()]) for response in r]) + assert all( + [ + all([res["ok"] == "pong" for _, res in response.items()]) + for response in r + ] + ) def test_shutdown_exit_with_zero(self, celery_setup: CeleryTestSetup): + # TODO: celery_setup.app.control.shutdown(destination=[celery_setup.worker.hostname()]) celery_setup.app.control.shutdown() while celery_setup.worker.container.status != "exited": celery_setup.worker.container.reload() - assert celery_setup.worker.container.attrs['State']['ExitCode'] == 0 + assert 
celery_setup.worker.container.attrs["State"]["ExitCode"] == 0 diff --git a/t/smoke/tests/test_worker.py b/t/smoke/tests/test_worker.py index 8a2713c9179..182efc700e7 100644 --- a/t/smoke/tests/test_worker.py +++ b/t/smoke/tests/test_worker.py @@ -3,19 +3,12 @@ from celery import Celery from celery.canvas import chain +from t.smoke.conftest import SuiteOperations, WorkerRestart from t.smoke.tasks import long_running_task -from t.smoke.tests.conftest import WorkerOperations -@pytest.mark.parametrize( - "restart_method", - [ - WorkerOperations.RestartMethod.POOL_RESTART, - WorkerOperations.RestartMethod.DOCKER_RESTART_GRACEFULLY, - WorkerOperations.RestartMethod.DOCKER_RESTART_FORCE, - ], -) -class test_worker_restart(WorkerOperations): +@pytest.mark.parametrize("method", list(WorkerRestart.Method)) +class test_worker_restart(SuiteOperations): @pytest.fixture def default_worker_app(self, default_worker_app: Celery) -> Celery: app = default_worker_app @@ -26,18 +19,18 @@ def default_worker_app(self, default_worker_app: Celery) -> Celery: def test_restart_during_task_execution( self, celery_setup: CeleryTestSetup, - restart_method: WorkerOperations.RestartMethod, + method: WorkerRestart, ): queue = celery_setup.worker.worker_queue sig = long_running_task.si(5, verbose=True).set(queue=queue) res = sig.delay() - self.restart(celery_setup, restart_method) + self.restart_worker(celery_setup.worker, method) assert res.get(RESULT_TIMEOUT) is True def test_restart_between_task_execution( self, celery_setup: CeleryTestSetup, - restart_method: WorkerOperations.RestartMethod, + method: WorkerRestart, ): queue = celery_setup.worker.worker_queue first = long_running_task.si(5, verbose=True).set(queue=queue) @@ -47,5 +40,5 @@ def test_restart_between_task_execution( sig = chain(first, second) sig.delay() assert first_res.get(RESULT_TIMEOUT) is True - self.restart(celery_setup, restart_method) + self.restart_worker(celery_setup.worker, method) assert second_res.get(RESULT_TIMEOUT) is True diff --git a/tox.ini b/tox.ini index e4b27ef70c7..cb0cca1a719 100644 --- a/tox.ini +++ b/tox.ini @@ -46,7 +46,7 @@ deps= commands = unit: pytest --maxfail=10 --capture=no -v --cov=celery --cov-report=xml --cov-report term {posargs} integration: pytest -xsv t/integration {posargs} - smoke: pytest -xsv t/smoke --reruns 5 --reruns-delay 10 --rerun-except AssertionError {posargs} + smoke: pytest -xsv t/smoke --reruns 10 --reruns-delay 60 --rerun-except AssertionError {posargs} setenv = PIP_EXTRA_INDEX_URL=https://celery.github.io/celery-wheelhouse/repo/simple/ BOTO_CONFIG = /dev/null From d03c810a0e7b3969826573e49caae1d2b7381a21 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 12 Dec 2023 18:29:09 +0200 Subject: [PATCH 0614/1051] Hotfix (#8717) * Removed useless test: test_task_retry_on_worker_crash() * Completed TODO in test_shutdown_exit_with_zero() * Increased worker memory for test_worker_failover from 10MB to 100MB * Updated pytest-xdist to v3.5+ --- .../tests/failover/test_worker_failover.py | 26 +++---------------- t/smoke/tests/test_control.py | 3 +-- t/smoke/tests/test_worker.py | 1 + tox.ini | 2 +- 4 files changed, 7 insertions(+), 25 deletions(-) diff --git a/t/smoke/tests/failover/test_worker_failover.py b/t/smoke/tests/failover/test_worker_failover.py index ae235168266..b3b7b788f73 100644 --- a/t/smoke/tests/failover/test_worker_failover.py +++ b/t/smoke/tests/failover/test_worker_failover.py @@ -1,7 +1,7 @@ from __future__ import annotations import pytest -from pytest_celery import RESULT_TIMEOUT, 
CeleryTestSetup, CeleryTestWorker, CeleryWorkerCluster, RedisTestBroker +from pytest_celery import RESULT_TIMEOUT, CeleryTestSetup, CeleryTestWorker, CeleryWorkerCluster from celery import Celery from t.smoke.conftest import SuiteOperations, WorkerKill @@ -26,7 +26,7 @@ class test_worker_failover(SuiteOperations): def default_worker_app(self, default_worker_app: Celery) -> Celery: app = default_worker_app app.conf.task_acks_late = True - app.conf.worker_max_memory_per_child = 10 * MB + app.conf.worker_max_memory_per_child = 100 * MB if app.conf.broker_url.startswith("redis"): # Redis Broker optimization to speed up the tests app.conf.broker_transport_options = {"visibility_timeout": 1} @@ -58,28 +58,10 @@ def test_reconnect_to_restarted_worker( queue = celery_setup.worker.worker_queue sig = long_running_task.si(1).set(queue=queue) res = sig.delay() - assert res.get(timeout=10) is True + assert res.get(timeout=RESULT_TIMEOUT) is True for worker in celery_setup.worker_cluster: self.kill_worker(worker, method) celery_setup.worker.restart() sig = long_running_task.si(1).set(queue=queue) res = sig.delay() - assert res.get(timeout=10) is True - - def test_task_retry_on_worker_crash( - self, - celery_setup: CeleryTestSetup, - method: WorkerKill, - ): - assert len(celery_setup.worker_cluster) > 1 - - if isinstance(celery_setup.broker, RedisTestBroker): - pytest.xfail("Potential Bug: works with RabbitMQ, but not Redis") - - sleep_time = 4 - queue = celery_setup.worker.worker_queue - sig = long_running_task.si(sleep_time, verbose=True).set(queue=queue) - res = sig.apply_async(retry=True, retry_policy={"max_retries": 1}) - celery_setup.worker.wait_for_log("Sleeping: 2") # Let task run - self.kill_worker(celery_setup.worker, method) - assert res.get(timeout=10) is True + assert res.get(timeout=RESULT_TIMEOUT) is True diff --git a/t/smoke/tests/test_control.py b/t/smoke/tests/test_control.py index 5a911524186..7c6123a7db9 100644 --- a/t/smoke/tests/test_control.py +++ b/t/smoke/tests/test_control.py @@ -12,8 +12,7 @@ def test_sanity(self, celery_setup: CeleryTestSetup): ) def test_shutdown_exit_with_zero(self, celery_setup: CeleryTestSetup): - # TODO: celery_setup.app.control.shutdown(destination=[celery_setup.worker.hostname()]) - celery_setup.app.control.shutdown() + celery_setup.app.control.shutdown(destination=[celery_setup.worker.hostname()]) while celery_setup.worker.container.status != "exited": celery_setup.worker.container.reload() assert celery_setup.worker.container.attrs["State"]["ExitCode"] == 0 diff --git a/t/smoke/tests/test_worker.py b/t/smoke/tests/test_worker.py index 182efc700e7..28e7a304d95 100644 --- a/t/smoke/tests/test_worker.py +++ b/t/smoke/tests/test_worker.py @@ -32,6 +32,7 @@ def test_restart_between_task_execution( celery_setup: CeleryTestSetup, method: WorkerRestart, ): + # We use freeze() to control the order of execution for the restart operation queue = celery_setup.worker.worker_queue first = long_running_task.si(5, verbose=True).set(queue=queue) first_res = first.freeze() diff --git a/tox.ini b/tox.ini index cb0cca1a719..d4a77bc8e47 100644 --- a/tox.ini +++ b/tox.ini @@ -37,7 +37,7 @@ deps= pypy3: -r{toxinidir}/requirements/test-ci-default.txt integration: -r{toxinidir}/requirements/test-integration.txt - smoke: pytest-xdist==3.3.1 + smoke: pytest-xdist>=3.5 linkcheck,apicheck,configcheck: -r{toxinidir}/requirements/docs.txt lint: pre-commit From 6dc797b50ce470201f830f17fe228c7c149a9a6d Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 12 Dec 2023 18:57:31 +0200 
Subject: [PATCH 0615/1051] [Smoke Tests only] Using pytest-xdist config: --dist=loadscope (#8719) * [Smoke Tests only] Using pytest-xdist config: --dist=loadscope * Trigger CI Tests if tox.ini was changed in a PR --- .github/workflows/python-package.yml | 2 ++ tox.ini | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 1dd4d7a2b92..e4d3858c843 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -11,6 +11,7 @@ on: - '**.txt' - '.github/workflows/python-package.yml' - '**.toml' + - "tox.ini" pull_request: branches: [ 'main', 'smoke_tests' ] paths: @@ -18,6 +19,7 @@ on: - '**.txt' - '**.toml' - '.github/workflows/python-package.yml' + - "tox.ini" permissions: contents: read # to fetch code (actions/checkout) diff --git a/tox.ini b/tox.ini index d4a77bc8e47..8ace1223262 100644 --- a/tox.ini +++ b/tox.ini @@ -46,7 +46,7 @@ deps= commands = unit: pytest --maxfail=10 --capture=no -v --cov=celery --cov-report=xml --cov-report term {posargs} integration: pytest -xsv t/integration {posargs} - smoke: pytest -xsv t/smoke --reruns 10 --reruns-delay 60 --rerun-except AssertionError {posargs} + smoke: pytest -xsv t/smoke --dist=loadscope --reruns 10 --reruns-delay 60 --rerun-except AssertionError {posargs} setenv = PIP_EXTRA_INDEX_URL=https://celery.github.io/celery-wheelhouse/repo/simple/ BOTO_CONFIG = /dev/null From b77bb9c3d650d3889d88c2596a0e2df4b5cac0ee Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 12 Dec 2023 22:15:39 +0200 Subject: [PATCH 0616/1051] Added test_broker_failover::test_broker_failover_ui() (#8720) --- t/smoke/tests/failover/test_broker_failover.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/t/smoke/tests/failover/test_broker_failover.py b/t/smoke/tests/failover/test_broker_failover.py index be41cdcce43..53ccaeee59d 100644 --- a/t/smoke/tests/failover/test_broker_failover.py +++ b/t/smoke/tests/failover/test_broker_failover.py @@ -50,3 +50,11 @@ def test_reconnect_to_main(self, celery_setup: CeleryTestSetup): celery_setup.broker_cluster[0].restart() res = identity.s(expected).apply_async(queue=celery_setup.worker.worker_queue) assert res.get(timeout=RESULT_TIMEOUT) == expected + + def test_broker_failover_ui(self, celery_setup: CeleryTestSetup): + assert len(celery_setup.broker_cluster) > 1 + celery_setup.broker_cluster[0].kill() + celery_setup.worker.assert_log_exists("Will retry using next failover.") + celery_setup.worker.assert_log_exists( + f"Connected to amqp://guest:**@{celery_setup.broker_cluster[1].hostname()}:5672//" + ) From 9ba1669648a48dc3a1188f7e629d173455eb0bc3 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 13 Dec 2023 21:52:02 +0200 Subject: [PATCH 0617/1051] Cleanup useless code (#8723) --- t/smoke/tests/failover/test_worker_failover.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/t/smoke/tests/failover/test_worker_failover.py b/t/smoke/tests/failover/test_worker_failover.py index b3b7b788f73..2d5bf48f7d0 100644 --- a/t/smoke/tests/failover/test_worker_failover.py +++ b/t/smoke/tests/failover/test_worker_failover.py @@ -26,9 +26,7 @@ class test_worker_failover(SuiteOperations): def default_worker_app(self, default_worker_app: Celery) -> Celery: app = default_worker_app app.conf.task_acks_late = True - app.conf.worker_max_memory_per_child = 100 * MB if app.conf.broker_url.startswith("redis"): - # Redis Broker optimization to speed up the tests app.conf.broker_transport_options = 
{"visibility_timeout": 1} yield app @@ -40,9 +38,6 @@ def test_killing_first_worker( assert len(celery_setup.worker_cluster) > 1 queue = celery_setup.worker.worker_queue - sig = long_running_task.si(1).set(queue=queue) - res = sig.delay() - assert res.get(timeout=RESULT_TIMEOUT) is True self.kill_worker(celery_setup.worker, method) sig = long_running_task.si(1).set(queue=queue) res = sig.delay() @@ -56,9 +51,6 @@ def test_reconnect_to_restarted_worker( assert len(celery_setup.worker_cluster) > 1 queue = celery_setup.worker.worker_queue - sig = long_running_task.si(1).set(queue=queue) - res = sig.delay() - assert res.get(timeout=RESULT_TIMEOUT) is True for worker in celery_setup.worker_cluster: self.kill_worker(worker, method) celery_setup.worker.restart() From 3ba927e903f43af2ab2f65b093758148ab79b600 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 25 Dec 2023 20:22:47 +0200 Subject: [PATCH 0618/1051] Added test_thread_safe.py to smoke tests (#8738) --- t/smoke/tests/test_thread_safe.py | 67 +++++++++++++++++++++++++++++++ 1 file changed, 67 insertions(+) create mode 100644 t/smoke/tests/test_thread_safe.py diff --git a/t/smoke/tests/test_thread_safe.py b/t/smoke/tests/test_thread_safe.py new file mode 100644 index 00000000000..375dff2acdd --- /dev/null +++ b/t/smoke/tests/test_thread_safe.py @@ -0,0 +1,67 @@ +from __future__ import annotations + +from concurrent.futures import ThreadPoolExecutor +from unittest.mock import Mock + +import pytest +from pytest_celery import CeleryTestSetup, CeleryTestWorker, CeleryWorkerCluster + +from celery.app.base import set_default_app +from celery.signals import after_task_publish +from t.integration.tasks import identity + + +@pytest.fixture( + params=[ + # Single worker + ["celery_setup_worker"], + # Workers cluster (same queue) + ["celery_setup_worker", "celery_alt_dev_worker"], + ] +) +def celery_worker_cluster(request: pytest.FixtureRequest) -> CeleryWorkerCluster: + nodes: tuple[CeleryTestWorker] = [ + request.getfixturevalue(worker) for worker in request.param + ] + cluster = CeleryWorkerCluster(*nodes) + yield cluster + cluster.teardown() + + +class test_thread_safety: + @pytest.mark.parametrize( + "threads_count", + [ + # Single + 1, + # Multiple + 2, + # Many + 42, + ], + ) + def test_multithread_task_publish( + self, + celery_setup: CeleryTestSetup, + threads_count: int, + ): + signal_was_called = Mock() + + @after_task_publish.connect + def after_task_publish_handler(*args, **kwargs): + nonlocal signal_was_called + signal_was_called(True) + + def thread_worker(): + set_default_app(celery_setup.app) + identity.si("Published from thread").apply_async( + queue=celery_setup.worker.worker_queue + ) + + executor = ThreadPoolExecutor(threads_count) + + with executor: + for _ in range(threads_count): + executor.submit(thread_worker) + + assert signal_was_called.call_count == threads_count From 5a58f1c7258365e95f534a68a6ff1d843733391d Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 27 Dec 2023 12:31:20 +0200 Subject: [PATCH 0619/1051] Added task termination tests (#8741) * Fixed wrong type annotations in t/smoke/tests/test_worker.py * Added t/smoke/tests/test_tasks.py::test_task_termination suite * Added 'psutil' to t/smoke/workers/docker/* * Added test_task_termination.test_child_process_respawn() * Added test_task_termination.test_terminated_task_logs() --- t/smoke/operations/task_termination.py | 77 +++++------------- t/smoke/tasks.py | 108 ++----------------------- t/smoke/tests/test_tasks.py | 92 ++++++++++++++++++++- 
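[Editor's aside — not part of the patch stream] The thread-safety test added above counts task publishes through the after_task_publish signal. A minimal sketch of the same hook, assuming an illustrative app and broker URL:

    from celery import Celery
    from celery.signals import after_task_publish

    app = Celery("tasks", broker="redis://localhost:6379/0")  # illustrative URL
    published = []

    @after_task_publish.connect
    def count_publish(sender=None, headers=None, **kwargs):
        # `sender` is the task name; the signal fires in the publishing process,
        # which is why the test can assert on it from the test body.
        published.append(sender)
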
t/smoke/tests/test_worker.py | 4 +- t/smoke/workers/docker/dev | 6 +- t/smoke/workers/docker/pypi | 6 +- 6 files changed, 127 insertions(+), 166 deletions(-) diff --git a/t/smoke/operations/task_termination.py b/t/smoke/operations/task_termination.py index d51f64da307..a35dbcf0f2f 100644 --- a/t/smoke/operations/task_termination.py +++ b/t/smoke/operations/task_termination.py @@ -1,78 +1,37 @@ from __future__ import annotations -from dataclasses import dataclass from enum import Enum, auto from pytest_celery import CeleryTestWorker -from celery.exceptions import TimeLimitExceeded, WorkerLostError -from t.smoke.tasks import suicide +from celery.canvas import Signature +from celery.result import AsyncResult +from t.smoke.tasks import suicide_delay_timeout, suicide_exhaust_memory, suicide_sigkill, suicide_system_exit class TaskTermination: class Method(Enum): - DELAY_TIMEOUT = auto() - CPU_OVERLOAD = auto() - EXCEPTION = auto() + SIGKILL = auto() SYSTEM_EXIT = auto() - ALLOCATE_MAX_MEMORY = auto() + DELAY_TIMEOUT = auto() EXHAUST_MEMORY = auto() - EXHAUST_HDD = auto() - CONTROL_SHUTDOWN = auto() - SIGKILL = auto() - @dataclass - class Options: - worker: CeleryTestWorker - method: str - allocate: int - large_file_name: str - hostname: str - try_eager: bool = True - time_limit: int = 4 - cpu_load_factor: int = 420 - - def run_suicide_task( + def apply_suicide_task( self, worker: CeleryTestWorker, method: TaskTermination.Method, - **options: dict, - ): - # Update kwargs with default values for missing keys - defaults = { - "worker": worker, - "method": method.name, - "allocate": worker.app.conf.worker_max_memory_per_child * 10**9, - "large_file_name": worker.name(), - "hostname": worker.hostname(), - } - options = {**defaults, **options} - options = TaskTermination.Options(**options) - - expected_error = { - TaskTermination.Method.DELAY_TIMEOUT: TimeLimitExceeded, - TaskTermination.Method.CPU_OVERLOAD: RecursionError, - TaskTermination.Method.EXCEPTION: Exception, - TaskTermination.Method.SYSTEM_EXIT: WorkerLostError, - TaskTermination.Method.ALLOCATE_MAX_MEMORY: MemoryError, - TaskTermination.Method.EXHAUST_MEMORY: WorkerLostError, - TaskTermination.Method.EXHAUST_HDD: OSError, - TaskTermination.Method.SIGKILL: WorkerLostError, - }.get(method) - + ) -> AsyncResult: try: - suicide(**options.__dict__) - except Exception as e: - if expected_error is None: - # No specific error expected, this is an unexpected exception - assert ( - False - ), f"Worker termination by '{method.name}' failed due to an unexpected error: {e}" - - if not isinstance(e, expected_error): - # Specific error expected but an unexpected type of error occurred - assert ( - False - ), f"Worker termination by '{method.name}' failed due to a different error: {e}" + suicide_sig: Signature = { + TaskTermination.Method.SIGKILL: suicide_sigkill.si(), + TaskTermination.Method.SYSTEM_EXIT: suicide_system_exit.si(), + TaskTermination.Method.DELAY_TIMEOUT: suicide_delay_timeout.si(), + TaskTermination.Method.EXHAUST_MEMORY: suicide_exhaust_memory.si(), + }[method] + + return suicide_sig.apply_async(queue=worker.worker_queue) finally: + # If there's an unexpected bug and the termination of the task caused the worker + # to crash, this will refresh the container object with the updated container status + # which can be asserted/checked during a test (for dev/debug) worker.container.reload() diff --git a/t/smoke/tasks.py b/t/smoke/tasks.py index 549cfb0406a..e15514320d0 100644 --- a/t/smoke/tasks.py +++ b/t/smoke/tasks.py @@ -1,15 +1,12 @@ 
from __future__ import annotations -import math import os import sys from signal import SIGKILL -from sys import getsizeof from time import sleep import celery.utils from celery import Task, shared_task, signature -from celery.app.control import Control from celery.canvas import Signature from t.integration.tasks import * # noqa from t.integration.tasks import replaced_with_me @@ -47,74 +44,21 @@ def replace_with_task(self: Task, replace_with: Signature = None): @shared_task -def suicide(method: str, try_eager: bool = True, **options: dict): - termination_method = { - "DELAY_TIMEOUT": suicide_delay_timeout.si( - time_limit=options["time_limit"], - ), - "CPU_OVERLOAD": suicide_cpu_overload.si( - cpu_load_factor=options["cpu_load_factor"] - ), - "EXCEPTION": suicide_exception.si(), - "SYSTEM_EXIT": suicide_system_exit.si(), - "ALLOCATE_MAX_MEMORY": suicide_allocate_max_memory.si( - allocate=options["allocate"] - ), - "EXHAUST_MEMORY": suicide_exhaust_memory.si(), - "EXHAUST_HDD": suicide_exhaust_hdd.si( - large_file_name=options["large_file_name"] - ), - "CONTROL_SHUTDOWN": suicide_control_shutdown.si( - hostname=options["hostname"], - ), - "SIGKILL": suicide_sigkill.si(), - } - - sig = termination_method.get(method) - if sig: - if try_eager and method in { - "CONTROL_SHUTDOWN", - }: - return sig.apply().get() - - worker = options["worker"] - return sig.apply_async(queue=worker.worker_queue).get() - else: - raise ValueError(f"Unsupported termination method: {method}") - - -@shared_task(time_limit=2) -def suicide_delay_timeout(time_limit: int = 4): - """Delays the execution to simulate a task timeout.""" - sleep(time_limit) - - -@shared_task -def suicide_cpu_overload(cpu_load_factor: int = 420): - """Performs CPU-intensive operations to simulate a CPU overload.""" - - def cpu_intensive_calculation(n): - return cpu_intensive_calculation(math.sin(n)) - - cpu_intensive_calculation(cpu_load_factor) - - -@shared_task -def suicide_exception(): - """Raises an exception to simulate an unexpected error during task execution.""" - raise Exception("Simulated task failure due to an exception.") +def suicide_sigkill(): + """Forceful termination.""" + os.kill(os.getpid(), SIGKILL) @shared_task def suicide_system_exit(): """Triggers a system exit to simulate a critical stop of the Celery worker.""" - sys.exit("Simulated Celery worker stop via system exit.") + sys.exit(1) -@shared_task -def suicide_allocate_max_memory(allocate: int): - """Allocates the maximum amount of memory permitted, potentially leading to memory errors.""" - _ = [0] * (allocate // getsizeof(int())) +@shared_task(time_limit=2) +def suicide_delay_timeout(): + """Delays the execution to simulate a task timeout.""" + sleep(4) @shared_task @@ -123,39 +67,3 @@ def suicide_exhaust_memory(): mem = [] while True: mem.append(" " * 10**6) - - -@shared_task -def suicide_exhaust_hdd(large_file_name: str = "large_file"): - """Consumes disk space in /tmp to simulate a scenario where the disk is getting full.""" - # file_path = f"/tmp/{large_file_name}.tmp" - # try: - # with open(file_path, "wb") as f: - # chunk = b"\0" * 42 * 1024**2 # 42 MB - # while True: - # f.write(chunk) - # finally: - # if os.path.exists(file_path): - # os.remove(file_path) - - # This code breaks GitHub CI so we simulate the same error as best effort - ######################################################################### - # [error]Failed to create step summary using 'GITHUB_STEP_SUMMARY': No space left on device - # [error]No space left on device - raise OSError("No space 
left on device") - - -@shared_task -def suicide_control_shutdown(hostname: str): - """Initiates a controlled shutdown via the Control API.""" - from celery.app.base import get_current_app - - app = get_current_app() - control: Control = app.control - control.shutdown(destination=[hostname]) - - -@shared_task -def suicide_sigkill(): - """Forceful termination.""" - os.kill(os.getpid(), SIGKILL) diff --git a/t/smoke/tests/test_tasks.py b/t/smoke/tests/test_tasks.py index 162db9bfc70..6909d40f024 100644 --- a/t/smoke/tests/test_tasks.py +++ b/t/smoke/tests/test_tasks.py @@ -1,11 +1,101 @@ import pytest from pytest_celery import RESULT_TIMEOUT, CeleryTestSetup, CeleryTestWorker, CeleryWorkerCluster +from retry import retry -from celery import signature +from celery import Celery, signature +from celery.exceptions import TimeLimitExceeded, WorkerLostError from t.integration.tasks import add, identity +from t.smoke.conftest import SuiteOperations, TaskTermination from t.smoke.tasks import replace_with_task +class test_task_termination(SuiteOperations): + @pytest.fixture + def default_worker_app(self, default_worker_app: Celery) -> Celery: + app = default_worker_app + app.conf.worker_prefetch_multiplier = 1 + app.conf.worker_concurrency = 1 + yield app + + @pytest.mark.parametrize( + "method,expected_error", + [ + (TaskTermination.Method.SIGKILL, WorkerLostError), + (TaskTermination.Method.SYSTEM_EXIT, WorkerLostError), + (TaskTermination.Method.DELAY_TIMEOUT, TimeLimitExceeded), + (TaskTermination.Method.EXHAUST_MEMORY, WorkerLostError), + ], + ) + def test_child_process_respawn( + self, + celery_setup: CeleryTestSetup, + method: TaskTermination.Method, + expected_error: Exception, + ): + pinfo_before = celery_setup.worker.get_running_processes_info( + ["pid", "name"], + filters={"name": "celery"}, + ) + + with pytest.raises(expected_error): + self.apply_suicide_task(celery_setup.worker, method).get() + + # Allowing the worker to respawn the child process before we continue + @retry(tries=42, delay=0.1) # 4.2 seconds + def wait_for_two_celery_processes(): + pinfo_current = celery_setup.worker.get_running_processes_info( + ["pid", "name"], + filters={"name": "celery"}, + ) + if len(pinfo_current) != 2: + assert ( + False + ), f"Child process did not respawn with method: {method.name}" + + wait_for_two_celery_processes() + + pinfo_after = celery_setup.worker.get_running_processes_info( + ["pid", "name"], + filters={"name": "celery"}, + ) + + pids_before = {item["pid"] for item in pinfo_before} + pids_after = {item["pid"] for item in pinfo_after} + assert len(pids_before | pids_after) == 3 + + @pytest.mark.parametrize( + "method,expected_log", + [ + ( + TaskTermination.Method.SIGKILL, + "Worker exited prematurely: signal 9 (SIGKILL)", + ), + ( + TaskTermination.Method.SYSTEM_EXIT, + "Worker exited prematurely: exitcode 1", + ), + ( + TaskTermination.Method.DELAY_TIMEOUT, + "Hard time limit (2s) exceeded for t.smoke.tasks.suicide_delay_timeout", + ), + ( + TaskTermination.Method.EXHAUST_MEMORY, + "Worker exited prematurely: signal 9 (SIGKILL)", + ), + ], + ) + def test_terminated_task_logs( + self, + celery_setup: CeleryTestSetup, + method: TaskTermination.Method, + expected_log: str, + ): + with pytest.raises(Exception): + self.apply_suicide_task(celery_setup.worker, method).get() + + celery_setup.worker.assert_log_exists(expected_log) + + class test_replace: @pytest.fixture def celery_worker_cluster( diff --git a/t/smoke/tests/test_worker.py b/t/smoke/tests/test_worker.py index 
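[Editor's aside — not part of the patch stream] The respawn check above polls with the PyPI `retry` package until the worker pool replaces its killed child process. A standalone sketch of that polling pattern, assuming psutil is installed (the Dockerfile changes below add it) and that pool children appear as processes named "celery":

    import psutil
    from retry import retry

    @retry(tries=42, delay=0.1)  # poll for up to ~4.2 seconds
    def wait_for_celery_processes(expected: int = 2) -> None:
        # The assertion raises until the expected count is reached,
        # and @retry re-runs the check after each failure.
        names = [p.info["name"] for p in psutil.process_iter(["name"])]
        assert names.count("celery") == expected, "child process not respawned yet"
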
28e7a304d95..6aefc731304 100644 --- a/t/smoke/tests/test_worker.py +++ b/t/smoke/tests/test_worker.py @@ -19,7 +19,7 @@ def default_worker_app(self, default_worker_app: Celery) -> Celery: def test_restart_during_task_execution( self, celery_setup: CeleryTestSetup, - method: WorkerRestart, + method: WorkerRestart.Method, ): queue = celery_setup.worker.worker_queue sig = long_running_task.si(5, verbose=True).set(queue=queue) @@ -30,7 +30,7 @@ def test_restart_during_task_execution( def test_restart_between_task_execution( self, celery_setup: CeleryTestSetup, - method: WorkerRestart, + method: WorkerRestart.Method, ): # We use freeze() to control the order of execution for the restart operation queue = celery_setup.worker.worker_queue diff --git a/t/smoke/workers/docker/dev b/t/smoke/workers/docker/dev index ee1709835e3..8265e56d7be 100644 --- a/t/smoke/workers/docker/dev +++ b/t/smoke/workers/docker/dev @@ -21,8 +21,10 @@ ENV PYTHONDONTWRITEBYTECODE=1 WORKDIR /celery COPY --chown=test_user:test_user . /celery -RUN pip install --no-cache-dir --upgrade pip && \ - pip install --no-cache-dir -e /celery[redis,memcache,pymemcache] +RUN pip install --no-cache-dir --upgrade \ + pip \ + -e /celery[redis,memcache,pymemcache] \ + psutil # The workdir must be /app WORKDIR /app diff --git a/t/smoke/workers/docker/pypi b/t/smoke/workers/docker/pypi index 85d51dadf9a..4d3300d3e28 100644 --- a/t/smoke/workers/docker/pypi +++ b/t/smoke/workers/docker/pypi @@ -20,8 +20,10 @@ ENV PYTHONUNBUFFERED=1 ENV PYTHONDONTWRITEBYTECODE=1 # Install Python dependencies -RUN pip install --no-cache-dir --upgrade pip \ - && pip install --no-cache-dir celery[redis,memcache,pymemcache]${CELERY_VERSION:+==$CELERY_VERSION} +RUN pip install --no-cache-dir --upgrade \ + pip \ + celery[redis,memcache,pymemcache]${CELERY_VERSION:+==$CELERY_VERSION} \ + psutil # The workdir must be /app WORKDIR /app From dd92814a5322aae3df6cbb132db615825ee28fe2 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 3 Jan 2024 03:18:12 +0200 Subject: [PATCH 0620/1051] Use pytest-celery via PyPI: v1.0.0a11 (#8749) --- requirements/test-tmp_for_dev.txt | 3 --- requirements/test.txt | 4 ++-- tox.ini | 1 - 3 files changed, 2 insertions(+), 6 deletions(-) delete mode 100644 requirements/test-tmp_for_dev.txt diff --git a/requirements/test-tmp_for_dev.txt b/requirements/test-tmp_for_dev.txt deleted file mode 100644 index 326c2e82e07..00000000000 --- a/requirements/test-tmp_for_dev.txt +++ /dev/null @@ -1,3 +0,0 @@ -# -e ../pytest-celery -git+https://github.com/celery/pytest-celery.git -# git+https://github.com/Katz-Consulting-Group/pytest-celery.git@BRANCH_NAME#egg=pytest-celery \ No newline at end of file diff --git a/requirements/test.txt b/requirements/test.txt index 2b26eef5e9f..82b33838875 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,6 +1,6 @@ pytest==7.4.4 -# pytest-celery==1.0.0a1 -pytest-rerunfailures==12.0 +pytest-celery==1.0.0a11 +pytest-rerunfailures==13.0.0 pytest-subtests==0.11.0 pytest-timeout==2.2.0 pytest-click==1.1.0 diff --git a/tox.ini b/tox.ini index 8ace1223262..37a568a00b2 100644 --- a/tox.ini +++ b/tox.ini @@ -29,7 +29,6 @@ passenv = deps= -r{toxinidir}/requirements/test.txt - -r{toxinidir}/requirements/test-tmp_for_dev.txt -r{toxinidir}/requirements/pkgutils.txt 3.8,3.9,3.10,3.11,3.12: -r{toxinidir}/requirements/test-ci-default.txt From e350e809c1339fa97d26a302b94e2cb1de0b9ccd Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sat, 6 Jan 2024 21:57:02 +0200 Subject: [PATCH 0621/1051] Updated Community 
standards (#8758) * Added pytest-celery to .github/ISSUE_TEMPLATE/config.yml * Added pytest-celery to CONTRIBUTING.rst * Added Tomer Nosrati to CONTRIBUTING.rst * Added Tomer Nosrati to CONTRIBUTORS.txt --- .github/ISSUE_TEMPLATE/config.yml | 2 ++ CONTRIBUTING.rst | 15 +++++++++++++++ CONTRIBUTORS.txt | 1 + 3 files changed, 18 insertions(+) diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index 69e8b18cb12..44099454b10 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -9,3 +9,5 @@ contact_links: - name: py-amqp Issue Tracker url: https://github.com/celery/py-amqp/issues/ about: If this issue only involves py-amqp, please open a new issue there. + - name: pytest-celery Issue Tracker + url: https://github.com/celery/pytest-celery/issues/ diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 8fdb3df4dc4..82d5c918a05 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -256,6 +256,7 @@ issue tracker. * :pypi:`kombu`: https://github.com/celery/kombu/issues * :pypi:`amqp`: https://github.com/celery/py-amqp/issues * :pypi:`vine`: https://github.com/celery/vine/issues +* :pypi:`pytest-celery`: https://github.com/celery/pytest-celery/issues * :pypi:`librabbitmq`: https://github.com/celery/librabbitmq/issues * :pypi:`django-celery-beat`: https://github.com/celery/django-celery-beat/issues * :pypi:`django-celery-results`: https://github.com/celery/django-celery-results/issues @@ -1245,6 +1246,11 @@ Josue Balandrano Coronel :github: https://github.com/xirdneh :twitter: https://twitter.com/eusoj_xirdneh +Tomer Nosrati +~~~~~~~~~~~~~ +:github: https://github.com/Nusnus +:twitter: https://x.com/tomer_nosrati + Website ------- @@ -1312,6 +1318,15 @@ Promise/deferred implementation. :PyPI: :pypi:`vine` :docs: https://vine.readthedocs.io +``pytest-celery`` +----------------- + +Pytest plugin for Celery. + +:git: https://github.com/celery/pytest-celery +:PyPI: :pypi:`pytest-celery` +:docs: https://pytest-celery.readthedocs.io + ``billiard`` ------------ diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index d63caa5ca65..e0a8394bc6f 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -295,3 +295,4 @@ JoonHwan Kim, 2022/08/01 Kaustav Banerjee, 2022/11/10 Austin Snoeyink 2022/12/06 Jeremy Z. 
Othieno 2023/07/27
+Tomer Nosrati, 2022/07/17
\ No newline at end of file

From 477561d0f74c42675385c358577b78289e257dd0 Mon Sep 17 00:00:00 2001
From: Tomer Nosrati
Date: Mon, 8 Jan 2024 06:29:11 +0200
Subject: [PATCH 0622/1051] Upgrade from pytest-celery v1.0.0a11 -> v1.0.0a12 (#8762)

---
 requirements/test.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements/test.txt b/requirements/test.txt
index 82b33838875..8912fd59174 100644
--- a/requirements/test.txt
+++ b/requirements/test.txt
@@ -1,5 +1,5 @@
 pytest==7.4.4
-pytest-celery==1.0.0a11
+pytest-celery==1.0.0a12
 pytest-rerunfailures==13.0.0
 pytest-subtests==0.11.0
 pytest-timeout==2.2.0

From 7d2cda1851e2aed265bc5ceecc8d18b6f39547e8 Mon Sep 17 00:00:00 2001
From: Tomer Nosrati
Date: Wed, 10 Jan 2024 02:17:00 +0200
Subject: [PATCH 0623/1051] Hotfix (#8781)

* Added exception msg check to test_terminated_task_logs()
* Renamed test_terminated_task_logs -> test_terminated_task_logs_correct_error
* Configured app.conf.broker_pool_limit = 42 for test_thread_safety::test_multithread_task_publish
* Cleanup
* Fixed TaskTermination.Method.DELAY_TIMEOUT case for test_terminated_task_logs_correct_error

---
 t/smoke/tests/test_tasks.py       | 15 ++++++++++++---
 t/smoke/tests/test_thread_safe.py |  7 +++++++
 2 files changed, 19 insertions(+), 3 deletions(-)

diff --git a/t/smoke/tests/test_tasks.py b/t/smoke/tests/test_tasks.py
index 6909d40f024..7e532594608 100644
--- a/t/smoke/tests/test_tasks.py
+++ b/t/smoke/tests/test_tasks.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import pytest
 from pytest_celery import RESULT_TIMEOUT, CeleryTestSetup, CeleryTestWorker, CeleryWorkerCluster
 from retry import retry
@@ -64,34 +66,41 @@ def wait_for_two_celery_processes():
         assert len(pids_before | pids_after) == 3
 
     @pytest.mark.parametrize(
-        "method,expected_log",
+        "method,expected_log,expected_exception_msg",
         [
             (
                 TaskTermination.Method.SIGKILL,
                 "Worker exited prematurely: signal 9 (SIGKILL)",
+                None,
             ),
             (
                 TaskTermination.Method.SYSTEM_EXIT,
                 "Worker exited prematurely: exitcode 1",
+                None,
             ),
             (
                 TaskTermination.Method.DELAY_TIMEOUT,
                 "Hard time limit (2s) exceeded for t.smoke.tasks.suicide_delay_timeout",
+                'TimeLimitExceeded(2,)',
             ),
             (
                 TaskTermination.Method.EXHAUST_MEMORY,
                 "Worker exited prematurely: signal 9 (SIGKILL)",
+                None,
             ),
         ],
     )
-    def test_terminated_task_logs(
+    def test_terminated_task_logs_correct_error(
         self,
         celery_setup: CeleryTestSetup,
         method: TaskTermination.Method,
         expected_log: str,
+        expected_exception_msg: str | None,
     ):
-        with pytest.raises(Exception):
+        try:
             self.apply_suicide_task(celery_setup.worker, method).get()
+        except Exception as err:
+            assert expected_exception_msg or expected_log in str(err)
 
         celery_setup.worker.assert_log_exists(expected_log)

diff --git a/t/smoke/tests/test_thread_safe.py b/t/smoke/tests/test_thread_safe.py
index 375dff2acdd..0cb4325357f 100644
--- a/t/smoke/tests/test_thread_safe.py
+++ b/t/smoke/tests/test_thread_safe.py
@@ -6,6 +6,7 @@
 import pytest
 from pytest_celery import CeleryTestSetup, CeleryTestWorker, CeleryWorkerCluster
 
+from celery import Celery
 from celery.app.base import set_default_app
 from celery.signals import after_task_publish
 from t.integration.tasks import identity
@@ -29,6 +30,12 @@ def celery_worker_cluster(request: pytest.FixtureRequest) -> CeleryWorkerCluster
 
 
 class test_thread_safety:
+    @pytest.fixture
+    def default_worker_app(self, default_worker_app: Celery) -> Celery:
+        app = default_worker_app
+        app.conf.broker_pool_limit = 42
+        yield app
+
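[Editor's aside — not part of the patch stream] The fixture above raises broker_pool_limit so that many publisher threads can each hold a broker connection at once. A minimal sketch of multithreaded publishing against such a pool; the app name, task, and broker URL are illustrative assumptions:

    from concurrent.futures import ThreadPoolExecutor

    from celery import Celery

    app = Celery("tasks", broker="redis://localhost:6379/0")  # illustrative URL
    app.conf.broker_pool_limit = 42  # upper bound on pooled broker connections

    @app.task
    def identity(x):
        return x

    def publish(i):
        # Each thread borrows a connection from the shared pool to publish.
        identity.delay(i)

    with ThreadPoolExecutor(max_workers=42) as pool:
        for i in range(42):
            pool.submit(publish, i)
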
@pytest.mark.parametrize( "threads_count", [ From 3122d12cd715c6a574ddd57b6146d5017f32e586 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 10 Jan 2024 13:00:06 +0200 Subject: [PATCH 0624/1051] Testing tasks renaming (#8784) --- t/smoke/operations/task_termination.py | 17 +++++++++-------- t/smoke/tasks.py | 8 ++++---- t/smoke/tests/test_tasks.py | 6 +++--- 3 files changed, 16 insertions(+), 15 deletions(-) diff --git a/t/smoke/operations/task_termination.py b/t/smoke/operations/task_termination.py index a35dbcf0f2f..98d2c5fc2e6 100644 --- a/t/smoke/operations/task_termination.py +++ b/t/smoke/operations/task_termination.py @@ -6,7 +6,8 @@ from celery.canvas import Signature from celery.result import AsyncResult -from t.smoke.tasks import suicide_delay_timeout, suicide_exhaust_memory, suicide_sigkill, suicide_system_exit +from t.smoke.tasks import (self_termination_delay_timeout, self_termination_exhaust_memory, self_termination_sigkill, + self_termination_system_exit) class TaskTermination: @@ -16,20 +17,20 @@ class Method(Enum): DELAY_TIMEOUT = auto() EXHAUST_MEMORY = auto() - def apply_suicide_task( + def apply_self_termination_task( self, worker: CeleryTestWorker, method: TaskTermination.Method, ) -> AsyncResult: try: - suicide_sig: Signature = { - TaskTermination.Method.SIGKILL: suicide_sigkill.si(), - TaskTermination.Method.SYSTEM_EXIT: suicide_system_exit.si(), - TaskTermination.Method.DELAY_TIMEOUT: suicide_delay_timeout.si(), - TaskTermination.Method.EXHAUST_MEMORY: suicide_exhaust_memory.si(), + self_termination_sig: Signature = { + TaskTermination.Method.SIGKILL: self_termination_sigkill.si(), + TaskTermination.Method.SYSTEM_EXIT: self_termination_system_exit.si(), + TaskTermination.Method.DELAY_TIMEOUT: self_termination_delay_timeout.si(), + TaskTermination.Method.EXHAUST_MEMORY: self_termination_exhaust_memory.si(), }[method] - return suicide_sig.apply_async(queue=worker.worker_queue) + return self_termination_sig.apply_async(queue=worker.worker_queue) finally: # If there's an unexpected bug and the termination of the task caused the worker # to crash, this will refresh the container object with the updated container status diff --git a/t/smoke/tasks.py b/t/smoke/tasks.py index e15514320d0..fcaffb2779a 100644 --- a/t/smoke/tasks.py +++ b/t/smoke/tasks.py @@ -44,25 +44,25 @@ def replace_with_task(self: Task, replace_with: Signature = None): @shared_task -def suicide_sigkill(): +def self_termination_sigkill(): """Forceful termination.""" os.kill(os.getpid(), SIGKILL) @shared_task -def suicide_system_exit(): +def self_termination_system_exit(): """Triggers a system exit to simulate a critical stop of the Celery worker.""" sys.exit(1) @shared_task(time_limit=2) -def suicide_delay_timeout(): +def self_termination_delay_timeout(): """Delays the execution to simulate a task timeout.""" sleep(4) @shared_task -def suicide_exhaust_memory(): +def self_termination_exhaust_memory(): """Continuously allocates memory to simulate memory exhaustion.""" mem = [] while True: diff --git a/t/smoke/tests/test_tasks.py b/t/smoke/tests/test_tasks.py index 7e532594608..cd71bf88478 100644 --- a/t/smoke/tests/test_tasks.py +++ b/t/smoke/tests/test_tasks.py @@ -40,7 +40,7 @@ def test_child_process_respawn( ) with pytest.raises(expected_error): - self.apply_suicide_task(celery_setup.worker, method).get() + self.apply_self_termination_task(celery_setup.worker, method).get() # Allowing the worker to respawn the child process before we continue @retry(tries=42, delay=0.1) # 4.2 seconds @@ -80,7 +80,7 
@@ def wait_for_two_celery_processes(): ), ( TaskTermination.Method.DELAY_TIMEOUT, - "Hard time limit (2s) exceeded for t.smoke.tasks.suicide_delay_timeout", + "Hard time limit (2s) exceeded for t.smoke.tasks.self_termination_delay_timeout", 'TimeLimitExceeded(2,)', ), ( @@ -98,7 +98,7 @@ def test_terminated_task_logs_correct_error( expected_exception_msg: str | None, ): try: - self.apply_suicide_task(celery_setup.worker, method).get() + self.apply_self_termination_task(celery_setup.worker, method).get() except Exception as err: assert expected_exception_msg or expected_log in str(err) From 701da1ef4040ed0731e9026d54278cc69bbb5f59 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Fri, 12 Jan 2024 12:25:27 +0200 Subject: [PATCH 0625/1051] Cleanup (#8788) --- t/smoke/tests/test_canvas.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/t/smoke/tests/test_canvas.py b/t/smoke/tests/test_canvas.py index e25aaaffc28..2a235da5665 100644 --- a/t/smoke/tests/test_canvas.py +++ b/t/smoke/tests/test_canvas.py @@ -1,4 +1,3 @@ -import pytest from pytest_celery import RESULT_TIMEOUT, CeleryTestSetup from celery.canvas import chain, chord, group, signature @@ -35,9 +34,6 @@ def test_sanity(self, celery_setup: CeleryTestSetup): class test_chord: def test_sanity(self, celery_setup: CeleryTestSetup): - if not celery_setup.chords_allowed(): - pytest.skip("Chords are not supported") - upgraded_chord = signature( group( identity.si("header_task1"), From 3252b69109cd6826dc94bce6447823a449dca0a7 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sun, 14 Jan 2024 21:20:33 +0200 Subject: [PATCH 0626/1051] Upgrade to pytest-celery v1.0.0b1 (First Beta Release) (#8792) * Refactored yield -> return in all fixtures that makes sense * Upgrade from pytest-celery v1.0.0a12 -> v1.0.0b1 * Added back unit & integration CI --- .github/workflows/python-package.yml | 200 +++++++++--------- requirements/test.txt | 2 +- t/smoke/conftest.py | 2 +- .../tests/failover/test_worker_failover.py | 2 +- t/smoke/tests/stamping/conftest.py | 4 +- t/smoke/tests/test_consumer.py | 8 +- t/smoke/tests/test_tasks.py | 2 +- t/smoke/tests/test_thread_safe.py | 2 +- t/smoke/tests/test_worker.py | 2 +- 9 files changed, 112 insertions(+), 112 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index e4d3858c843..5164695efdb 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -25,108 +25,108 @@ permissions: contents: read # to fetch code (actions/checkout) jobs: - # Unit: - - # runs-on: ${{ matrix.os }} - # strategy: - # fail-fast: false - # matrix: - # python-version: ['3.8', '3.9', '3.10', '3.11', '3.12', 'pypy-3.10'] - # os: ["ubuntu-latest", "windows-latest"] - # exclude: - # - python-version: '3.9' - # os: "windows-latest" - # - python-version: 'pypy-3.10' - # os: "windows-latest" - # - python-version: '3.10' - # os: "windows-latest" - # - python-version: '3.11' - # os: "windows-latest" - - # steps: - # - name: Install apt packages - # if: startsWith(matrix.os, 'ubuntu-') - # run: | - # sudo apt-get update && sudo apt-get install -f libcurl4-openssl-dev libssl-dev libgnutls28-dev httping expect libmemcached-dev - # - uses: actions/checkout@v4 - # - name: Set up Python ${{ matrix.python-version }} - # uses: actions/setup-python@v5 - # with: - # python-version: ${{ matrix.python-version }} - # cache: 'pip' - # cache-dependency-path: '**/setup.py' - - # - name: Install tox - # run: python -m pip install --upgrade pip 'tox' tox-gh-actions - # - name: 
> - # Run tox for - # "${{ matrix.python-version }}-unit" - # timeout-minutes: 30 - # run: | - # tox --verbose --verbose - - # - uses: codecov/codecov-action@v3 - # with: - # flags: unittests # optional - # fail_ci_if_error: true # optional (default = false) - # verbose: true # optional (default = false) - - # Integration: - # needs: - # - Unit - # if: needs.Unit.result == 'success' - # timeout-minutes: 240 - - # runs-on: ubuntu-latest - # strategy: - # fail-fast: false - # matrix: - # python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] - # toxenv: ['redis', 'rabbitmq', 'rabbitmq_redis'] - - # services: - # redis: - # image: redis - # ports: - # - 6379:6379 - # env: - # REDIS_HOST: localhost - # REDIS_PORT: 6379 - # rabbitmq: - # image: rabbitmq - # ports: - # - 5672:5672 - # env: - # RABBITMQ_DEFAULT_USER: guest - # RABBITMQ_DEFAULT_PASS: guest - - # steps: - # - name: Install apt packages - # run: | - # sudo apt-get update && sudo apt-get install -f libcurl4-openssl-dev libssl-dev libgnutls28-dev httping expect libmemcached-dev - - # - uses: actions/checkout@v4 - # - name: Set up Python ${{ matrix.python-version }} - # uses: actions/setup-python@v5 - # with: - # python-version: ${{ matrix.python-version }} - # cache: 'pip' - # cache-dependency-path: '**/setup.py' - # - name: Install tox - # run: python -m pip install --upgrade pip 'tox' tox-gh-actions - # - name: > - # Run tox for - # "${{ matrix.python-version }}-integration-${{ matrix.toxenv }}" - # timeout-minutes: 60 - # run: > - # tox --verbose --verbose -e - # "${{ matrix.python-version }}-integration-${{ matrix.toxenv }}" -vv + Unit: + + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + python-version: ['3.8', '3.9', '3.10', '3.11', '3.12', 'pypy-3.10'] + os: ["ubuntu-latest", "windows-latest"] + exclude: + - python-version: '3.9' + os: "windows-latest" + - python-version: 'pypy-3.10' + os: "windows-latest" + - python-version: '3.10' + os: "windows-latest" + - python-version: '3.11' + os: "windows-latest" + + steps: + - name: Install apt packages + if: startsWith(matrix.os, 'ubuntu-') + run: | + sudo apt-get update && sudo apt-get install -f libcurl4-openssl-dev libssl-dev libgnutls28-dev httping expect libmemcached-dev + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + cache: 'pip' + cache-dependency-path: '**/setup.py' + + - name: Install tox + run: python -m pip install --upgrade pip 'tox' tox-gh-actions + - name: > + Run tox for + "${{ matrix.python-version }}-unit" + timeout-minutes: 30 + run: | + tox --verbose --verbose + + - uses: codecov/codecov-action@v3 + with: + flags: unittests # optional + fail_ci_if_error: true # optional (default = false) + verbose: true # optional (default = false) + + Integration: + needs: + - Unit + if: needs.Unit.result == 'success' + timeout-minutes: 240 + + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + toxenv: ['redis', 'rabbitmq', 'rabbitmq_redis'] + + services: + redis: + image: redis + ports: + - 6379:6379 + env: + REDIS_HOST: localhost + REDIS_PORT: 6379 + rabbitmq: + image: rabbitmq + ports: + - 5672:5672 + env: + RABBITMQ_DEFAULT_USER: guest + RABBITMQ_DEFAULT_PASS: guest + + steps: + - name: Install apt packages + run: | + sudo apt-get update && sudo apt-get install -f libcurl4-openssl-dev libssl-dev libgnutls28-dev httping expect libmemcached-dev + + - uses: 
actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + cache: 'pip' + cache-dependency-path: '**/setup.py' + - name: Install tox + run: python -m pip install --upgrade pip 'tox' tox-gh-actions + - name: > + Run tox for + "${{ matrix.python-version }}-integration-${{ matrix.toxenv }}" + timeout-minutes: 60 + run: > + tox --verbose --verbose -e + "${{ matrix.python-version }}-integration-${{ matrix.toxenv }}" -vv Smoke: - # needs: - # - Integration - # if: needs.Integration.result == 'success' - # timeout-minutes: 240 + needs: + - Integration + if: needs.Integration.result == 'success' + timeout-minutes: 240 runs-on: ubuntu-latest strategy: diff --git a/requirements/test.txt b/requirements/test.txt index 8912fd59174..3ada61cca64 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,5 +1,5 @@ pytest==7.4.4 -pytest-celery==1.0.0a12 +pytest-celery==1.0.0b1 pytest-rerunfailures==13.0.0 pytest-subtests==0.11.0 pytest-timeout==2.2.0 diff --git a/t/smoke/conftest.py b/t/smoke/conftest.py index 25687325dbd..4a00ff63fb4 100644 --- a/t/smoke/conftest.py +++ b/t/smoke/conftest.py @@ -28,7 +28,7 @@ def default_worker_tasks(default_worker_tasks: set) -> set: default_worker_tasks.add(integration_tests_tasks) default_worker_tasks.add(smoke_tests_tasks) - yield default_worker_tasks + return default_worker_tasks redis_image = fetch(repository=REDIS_IMAGE) diff --git a/t/smoke/tests/failover/test_worker_failover.py b/t/smoke/tests/failover/test_worker_failover.py index 2d5bf48f7d0..301d7be1047 100644 --- a/t/smoke/tests/failover/test_worker_failover.py +++ b/t/smoke/tests/failover/test_worker_failover.py @@ -28,7 +28,7 @@ def default_worker_app(self, default_worker_app: Celery) -> Celery: app.conf.task_acks_late = True if app.conf.broker_url.startswith("redis"): app.conf.broker_transport_options = {"visibility_timeout": 1} - yield app + return app def test_killing_first_worker( self, diff --git a/t/smoke/tests/stamping/conftest.py b/t/smoke/tests/stamping/conftest.py index db7e86ae030..fa1e3f49874 100644 --- a/t/smoke/tests/stamping/conftest.py +++ b/t/smoke/tests/stamping/conftest.py @@ -11,7 +11,7 @@ def default_worker_tasks(default_worker_tasks: set) -> set: from t.smoke.tests.stamping import tasks as stamping_tasks default_worker_tasks.add(stamping_tasks) - yield default_worker_tasks + return default_worker_tasks @pytest.fixture @@ -19,7 +19,7 @@ def default_worker_signals(default_worker_signals: set) -> set: from t.smoke.tests.stamping import signals default_worker_signals.add(signals) - yield default_worker_signals + return default_worker_signals @pytest.fixture diff --git a/t/smoke/tests/test_consumer.py b/t/smoke/tests/test_consumer.py index 5645f2689b8..2586bbf9f1b 100644 --- a/t/smoke/tests/test_consumer.py +++ b/t/smoke/tests/test_consumer.py @@ -15,7 +15,7 @@ def default_worker_app(default_worker_app: Celery) -> Celery: app = default_worker_app app.conf.worker_prefetch_multiplier = WORKER_PREFETCH_MULTIPLIER app.conf.worker_concurrency = WORKER_CONCURRENCY - yield app + return app class test_worker_enable_prefetch_count_reduction_true: @@ -23,7 +23,7 @@ class test_worker_enable_prefetch_count_reduction_true: def default_worker_app(self, default_worker_app: Celery) -> Celery: app = default_worker_app app.conf.worker_enable_prefetch_count_reduction = True - yield app + return app @pytest.mark.parametrize("expected_running_tasks_count", range(1, WORKER_CONCURRENCY + 1)) def 
test_reducing_prefetch_count(self, celery_setup: CeleryTestSetup, expected_running_tasks_count: int): @@ -70,7 +70,7 @@ def default_worker_app(self, default_worker_app: Celery) -> Celery: app.conf.worker_prefetch_multiplier = 2 app.conf.worker_cancel_long_running_tasks_on_connection_loss = True app.conf.task_acks_late = True - yield app + return app def test_max_prefetch_passed_on_broker_restart(self, celery_setup: CeleryTestSetup): if isinstance(celery_setup.broker, RedisTestBroker): @@ -91,7 +91,7 @@ def default_worker_app(self, default_worker_app: Celery) -> Celery: app.conf.worker_enable_prefetch_count_reduction = False app.conf.worker_cancel_long_running_tasks_on_connection_loss = True app.conf.task_acks_late = True - yield app + return app def test_max_prefetch_not_passed_on_broker_restart(self, celery_setup: CeleryTestSetup): if isinstance(celery_setup.broker, RedisTestBroker): diff --git a/t/smoke/tests/test_tasks.py b/t/smoke/tests/test_tasks.py index cd71bf88478..f4748296b8b 100644 --- a/t/smoke/tests/test_tasks.py +++ b/t/smoke/tests/test_tasks.py @@ -17,7 +17,7 @@ def default_worker_app(self, default_worker_app: Celery) -> Celery: app = default_worker_app app.conf.worker_prefetch_multiplier = 1 app.conf.worker_concurrency = 1 - yield app + return app @pytest.mark.parametrize( "method,expected_error", diff --git a/t/smoke/tests/test_thread_safe.py b/t/smoke/tests/test_thread_safe.py index 0cb4325357f..ceab993e24d 100644 --- a/t/smoke/tests/test_thread_safe.py +++ b/t/smoke/tests/test_thread_safe.py @@ -34,7 +34,7 @@ class test_thread_safety: def default_worker_app(self, default_worker_app: Celery) -> Celery: app = default_worker_app app.conf.broker_pool_limit = 42 - yield app + return app @pytest.mark.parametrize( "threads_count", diff --git a/t/smoke/tests/test_worker.py b/t/smoke/tests/test_worker.py index 6aefc731304..15fbbf3cda8 100644 --- a/t/smoke/tests/test_worker.py +++ b/t/smoke/tests/test_worker.py @@ -14,7 +14,7 @@ def default_worker_app(self, default_worker_app: Celery) -> Celery: app = default_worker_app app.conf.worker_pool_restarts = True app.conf.task_acks_late = True - yield app + return app def test_restart_during_task_execution( self, From ec636fad813320bfb2a860cf69712702dcc530cb Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sun, 14 Jan 2024 22:09:47 +0200 Subject: [PATCH 0627/1051] Hotfix (#8794) * Removed smoke_tests branch from .github/workflows/python-package.yml * actions/checkout@v3 -> v4, actions/setup-python@v4 -> v5 * Updated requirements/extras/pytest.txt from pytest-celery==0.0.0 -> pytest-celery==1.0.0b1 * Removed duplicated memcache install in the smoke tests workers --- .github/workflows/python-package.yml | 6 +++--- requirements/extras/pytest.txt | 5 +---- t/smoke/workers/docker/dev | 2 +- t/smoke/workers/docker/pypi | 2 +- 4 files changed, 6 insertions(+), 9 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 5164695efdb..ad9e22112bf 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -13,7 +13,7 @@ on: - '**.toml' - "tox.ini" pull_request: - branches: [ 'main', 'smoke_tests' ] + branches: [ 'main' ] paths: - '**.py' - '**.txt' @@ -144,9 +144,9 @@ jobs: run: | sudo apt update - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} cache: 'pip' diff --git 
a/requirements/extras/pytest.txt b/requirements/extras/pytest.txt index 0d178f4a462..ed4fe4a199f 100644 --- a/requirements/extras/pytest.txt +++ b/requirements/extras/pytest.txt @@ -1,4 +1 @@ -pytest-celery==0.0.0 -# pytest-celery==1.0.0a1 -# git+https://github.com/celery/pytest-celery.git -# git+https://github.com/Katz-Consulting-Group/pytest-celery.git@celery_integration#egg=pytest-celery \ No newline at end of file +pytest-celery==1.0.0b1 diff --git a/t/smoke/workers/docker/dev b/t/smoke/workers/docker/dev index 8265e56d7be..a0619761cc8 100644 --- a/t/smoke/workers/docker/dev +++ b/t/smoke/workers/docker/dev @@ -23,7 +23,7 @@ WORKDIR /celery COPY --chown=test_user:test_user . /celery RUN pip install --no-cache-dir --upgrade \ pip \ - -e /celery[redis,memcache,pymemcache] \ + -e /celery[redis,pymemcache] \ psutil # The workdir must be /app diff --git a/t/smoke/workers/docker/pypi b/t/smoke/workers/docker/pypi index 4d3300d3e28..be8c5871a45 100644 --- a/t/smoke/workers/docker/pypi +++ b/t/smoke/workers/docker/pypi @@ -22,7 +22,7 @@ ENV PYTHONDONTWRITEBYTECODE=1 # Install Python dependencies RUN pip install --no-cache-dir --upgrade \ pip \ - celery[redis,memcache,pymemcache]${CELERY_VERSION:+==$CELERY_VERSION} \ + celery[redis,pymemcache]${CELERY_VERSION:+==$CELERY_VERSION} \ psutil # The workdir must be /app From f2407dcbe07f17974bbc164e0ed06967341ddf8d Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 17 Jan 2024 03:45:41 +0200 Subject: [PATCH 0628/1051] Moved smoke tests to their own workflow (#8797) --- .github/workflows/python-package.yml | 371 ++++++++++++++++++++++++--- 1 file changed, 337 insertions(+), 34 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index ad9e22112bf..a9c6d89ab2e 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -122,41 +122,344 @@ jobs: tox --verbose --verbose -e "${{ matrix.python-version }}-integration-${{ matrix.toxenv }}" -vv - Smoke: - needs: - - Integration - if: needs.Integration.result == 'success' - timeout-minutes: 240 + failover: + needs: + - Integration + if: needs.Integration.result == 'success' + timeout-minutes: 240 + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] - runs-on: ubuntu-latest - strategy: - fail-fast: false - matrix: - python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + steps: + - name: Fetch Docker Images + run: | + docker pull redis:latest + docker pull rabbitmq:latest - steps: - - name: Fetch Docker Images - run: | - docker pull redis:latest - docker pull rabbitmq:latest + - name: Install apt packages + run: | + sudo apt update - - name: Install apt packages - run: | - sudo apt update + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + cache: 'pip' + cache-dependency-path: '**/setup.py' + - name: Install tox + run: python -m pip install --upgrade pip tox tox-gh-actions + - name: > + Run tox for + "${{ matrix.python-version }}-smoke" + timeout-minutes: 60 + run: > + tox --verbose --verbose -e + "${{ matrix.python-version }}-smoke" -- -n auto -k failover - - uses: actions/checkout@v4 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - cache: 'pip' - cache-dependency-path: '**/setup.py' - - name: Install tox - run: python -m pip install --upgrade 
pip tox tox-gh-actions - - name: > - Run tox for - "${{ matrix.python-version }}-smoke" - timeout-minutes: 60 - run: > - tox --verbose --verbose -e - "${{ matrix.python-version }}-smoke" -- -n auto + stamping: + needs: + - Integration + if: needs.Integration.result == 'success' + timeout-minutes: 240 + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + + steps: + - name: Fetch Docker Images + run: | + docker pull redis:latest + docker pull rabbitmq:latest + + - name: Install apt packages + run: | + sudo apt update + + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + cache: 'pip' + cache-dependency-path: '**/setup.py' + - name: Install tox + run: python -m pip install --upgrade pip tox tox-gh-actions + - name: > + Run tox for + "${{ matrix.python-version }}-smoke" + timeout-minutes: 60 + run: > + tox --verbose --verbose -e + "${{ matrix.python-version }}-smoke" -- -n auto -k stamping + + canvas: + needs: + - Integration + if: needs.Integration.result == 'success' + timeout-minutes: 240 + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + + steps: + - name: Fetch Docker Images + run: | + docker pull redis:latest + docker pull rabbitmq:latest + + - name: Install apt packages + run: | + sudo apt update + + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + cache: 'pip' + cache-dependency-path: '**/setup.py' + - name: Install tox + run: python -m pip install --upgrade pip tox tox-gh-actions + - name: > + Run tox for + "${{ matrix.python-version }}-smoke" + timeout-minutes: 60 + run: > + tox --verbose --verbose -e + "${{ matrix.python-version }}-smoke" -- -n auto -k test_canvas.py + + consumer: + needs: + - Integration + if: needs.Integration.result == 'success' + timeout-minutes: 240 + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + + steps: + - name: Fetch Docker Images + run: | + docker pull redis:latest + docker pull rabbitmq:latest + + - name: Install apt packages + run: | + sudo apt update + + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + cache: 'pip' + cache-dependency-path: '**/setup.py' + - name: Install tox + run: python -m pip install --upgrade pip tox tox-gh-actions + - name: > + Run tox for + "${{ matrix.python-version }}-smoke" + timeout-minutes: 60 + run: > + tox --verbose --verbose -e + "${{ matrix.python-version }}-smoke" -- -n auto -k test_consumer.py + + control: + needs: + - Integration + if: needs.Integration.result == 'success' + timeout-minutes: 240 + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + + steps: + - name: Fetch Docker Images + run: | + docker pull redis:latest + docker pull rabbitmq:latest + + - name: Install apt packages + run: | + sudo apt update + + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + cache: 'pip' + cache-dependency-path: '**/setup.py' + - name: Install tox + run: python -m pip 
install --upgrade pip tox tox-gh-actions + - name: > + Run tox for + "${{ matrix.python-version }}-smoke" + timeout-minutes: 60 + run: > + tox --verbose --verbose -e + "${{ matrix.python-version }}-smoke" -- -n auto -k test_control.py + + signals: + needs: + - Integration + if: needs.Integration.result == 'success' + timeout-minutes: 240 + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + + steps: + - name: Fetch Docker Images + run: | + docker pull redis:latest + docker pull rabbitmq:latest + + - name: Install apt packages + run: | + sudo apt update + + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + cache: 'pip' + cache-dependency-path: '**/setup.py' + - name: Install tox + run: python -m pip install --upgrade pip tox tox-gh-actions + - name: > + Run tox for + "${{ matrix.python-version }}-smoke" + timeout-minutes: 60 + run: > + tox --verbose --verbose -e + "${{ matrix.python-version }}-smoke" -- -n auto -k test_signals.py + + tasks: + needs: + - Integration + if: needs.Integration.result == 'success' + timeout-minutes: 240 + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + + steps: + - name: Fetch Docker Images + run: | + docker pull redis:latest + docker pull rabbitmq:latest + + - name: Install apt packages + run: | + sudo apt update + + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + cache: 'pip' + cache-dependency-path: '**/setup.py' + - name: Install tox + run: python -m pip install --upgrade pip tox tox-gh-actions + - name: > + Run tox for + "${{ matrix.python-version }}-smoke" + timeout-minutes: 60 + run: > + tox --verbose --verbose -e + "${{ matrix.python-version }}-smoke" -- -n auto -k test_tasks.py + + thread_safe: + needs: + - Integration + if: needs.Integration.result == 'success' + timeout-minutes: 240 + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + + steps: + - name: Fetch Docker Images + run: | + docker pull redis:latest + docker pull rabbitmq:latest + + - name: Install apt packages + run: | + sudo apt update + + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + cache: 'pip' + cache-dependency-path: '**/setup.py' + - name: Install tox + run: python -m pip install --upgrade pip tox tox-gh-actions + - name: > + Run tox for + "${{ matrix.python-version }}-smoke" + timeout-minutes: 60 + run: > + tox --verbose --verbose -e + "${{ matrix.python-version }}-smoke" -- -n auto -k test_thread_safe.py + + worker: + needs: + - Integration + if: needs.Integration.result == 'success' + timeout-minutes: 240 + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + + steps: + - name: Fetch Docker Images + run: | + docker pull redis:latest + docker pull rabbitmq:latest + + - name: Install apt packages + run: | + sudo apt update + + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + cache: 'pip' + cache-dependency-path: 
'**/setup.py' + - name: Install tox + run: python -m pip install --upgrade pip tox tox-gh-actions + - name: > + Run tox for + "${{ matrix.python-version }}-smoke" + timeout-minutes: 60 + run: > + tox --verbose --verbose -e + "${{ matrix.python-version }}-smoke" -- -n auto -k test_worker.py From 78c06af57ec0bc4afe84bf21289d2c0b50dcb313 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 17 Jan 2024 13:38:58 +0200 Subject: [PATCH 0629/1051] Bugfix: Worker not consuming tasks after Redis broker restart (#8796) * Revert "Add annotations to minimise differences with celery-aio-pool's tracer.py. (#7925)" This reverts commit 0233c3b674dcfc6fff79f4161ca9a818dabf28e7. * Added smoke test: test_worker_consume_tasks_after_redis_broker_restart * Removed Redis xfail from tests now that the bug is fixed * Renamed smoke tests CI jobs --- .github/workflows/python-package.yml | 18 +++++------ celery/app/trace.py | 36 ++++----------------- t/smoke/tests/test_consumer.py | 47 +++++++++++++++++++++------- 3 files changed, 51 insertions(+), 50 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index a9c6d89ab2e..3efa187bc3e 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -122,7 +122,7 @@ jobs: tox --verbose --verbose -e "${{ matrix.python-version }}-integration-${{ matrix.toxenv }}" -vv - failover: + Smoke-failover: needs: - Integration if: needs.Integration.result == 'success' @@ -160,7 +160,7 @@ jobs: tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k failover - stamping: + Smoke-stamping: needs: - Integration if: needs.Integration.result == 'success' @@ -198,7 +198,7 @@ jobs: tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k stamping - canvas: + Smoke-canvas: needs: - Integration if: needs.Integration.result == 'success' @@ -236,7 +236,7 @@ jobs: tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k test_canvas.py - consumer: + Smoke-consumer: needs: - Integration if: needs.Integration.result == 'success' @@ -274,7 +274,7 @@ jobs: tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k test_consumer.py - control: + Smoke-control: needs: - Integration if: needs.Integration.result == 'success' @@ -312,7 +312,7 @@ jobs: tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k test_control.py - signals: + Smoke-signals: needs: - Integration if: needs.Integration.result == 'success' @@ -350,7 +350,7 @@ jobs: tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k test_signals.py - tasks: + Smoke-tasks: needs: - Integration if: needs.Integration.result == 'success' @@ -388,7 +388,7 @@ jobs: tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k test_tasks.py - thread_safe: + Smoke-thread_safe: needs: - Integration if: needs.Integration.result == 'success' @@ -426,7 +426,7 @@ jobs: tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k test_thread_safe.py - worker: + Smoke-worker: needs: - Integration if: needs.Integration.result == 'success' diff --git a/celery/app/trace.py b/celery/app/trace.py index 3933d01a481..2e8cf8a3181 100644 --- a/celery/app/trace.py +++ b/celery/app/trace.py @@ -8,7 +8,6 @@ import sys import time from collections import namedtuple -from typing import Any, Callable, Dict, FrozenSet, Optional, Sequence, Tuple, Type, Union from warnings import warn from billiard.einfo import ExceptionInfo, 
ExceptionWithTraceback @@ -17,8 +16,6 @@ from kombu.serialization import prepare_accept_content from kombu.utils.encoding import safe_repr, safe_str -import celery -import celery.loaders.app from celery import current_app, group, signals, states from celery._state import _task_stack from celery.app.task import Context @@ -294,20 +291,10 @@ def traceback_clear(exc=None): tb = tb.tb_next -def build_tracer( - name: str, - task: Union[celery.Task, celery.local.PromiseProxy], - loader: Optional[celery.loaders.app.AppLoader] = None, - hostname: Optional[str] = None, - store_errors: bool = True, - Info: Type[TraceInfo] = TraceInfo, - eager: bool = False, - propagate: bool = False, - app: Optional[celery.Celery] = None, - monotonic: Callable[[], int] = time.monotonic, - trace_ok_t: Type[trace_ok_t] = trace_ok_t, - IGNORE_STATES: FrozenSet[str] = IGNORE_STATES) -> \ - Callable[[str, Tuple[Any, ...], Dict[str, Any], Any], trace_ok_t]: +def build_tracer(name, task, loader=None, hostname=None, store_errors=True, + Info=TraceInfo, eager=False, propagate=False, app=None, + monotonic=time.monotonic, trace_ok_t=trace_ok_t, + IGNORE_STATES=IGNORE_STATES): """Return a function that traces task execution. Catches all exceptions and updates result backend with the @@ -387,12 +374,7 @@ def build_tracer( from celery import canvas signature = canvas.maybe_signature # maybe_ does not clone if already - def on_error( - request: celery.app.task.Context, - exc: Union[Exception, Type[Exception]], - state: str = FAILURE, - call_errbacks: bool = True) -> Tuple[Info, Any, Any, Any]: - """Handle any errors raised by a `Task`'s execution.""" + def on_error(request, exc, state=FAILURE, call_errbacks=True): if propagate: raise I = Info(state, exc) @@ -401,13 +383,7 @@ def on_error( ) return I, R, I.state, I.retval - def trace_task( - uuid: str, - args: Sequence[Any], - kwargs: Dict[str, Any], - request: Optional[Dict[str, Any]] = None) -> trace_ok_t: - """Execute and trace a `Task`.""" - + def trace_task(uuid, args, kwargs, request=None): # R - is the possibly prepared return value. # I - is the Info object. 
# T - runtime diff --git a/t/smoke/tests/test_consumer.py b/t/smoke/tests/test_consumer.py index 2586bbf9f1b..6448946e6fa 100644 --- a/t/smoke/tests/test_consumer.py +++ b/t/smoke/tests/test_consumer.py @@ -2,7 +2,7 @@ from pytest_celery import RESULT_TIMEOUT, CeleryTestSetup, RedisTestBroker from celery import Celery -from celery.canvas import group +from celery.canvas import chain, group from t.smoke.tasks import long_running_task, noop WORKER_PREFETCH_MULTIPLIER = 2 @@ -15,6 +15,10 @@ def default_worker_app(default_worker_app: Celery) -> Celery: app = default_worker_app app.conf.worker_prefetch_multiplier = WORKER_PREFETCH_MULTIPLIER app.conf.worker_concurrency = WORKER_CONCURRENCY + if app.conf.broker_url.startswith("redis"): + app.conf.broker_transport_options = {"visibility_timeout": 1} + if app.conf.result_backend.startswith("redis"): + app.conf.result_backend_transport_options = {"visibility_timeout": 1} return app @@ -27,9 +31,6 @@ def default_worker_app(self, default_worker_app: Celery) -> Celery: @pytest.mark.parametrize("expected_running_tasks_count", range(1, WORKER_CONCURRENCY + 1)) def test_reducing_prefetch_count(self, celery_setup: CeleryTestSetup, expected_running_tasks_count: int): - if isinstance(celery_setup.broker, RedisTestBroker): - pytest.xfail("Potential Bug: Redis Broker Restart is unstable") - sig = group(long_running_task.s(420) for _ in range(expected_running_tasks_count)) sig.apply_async(queue=celery_setup.worker.worker_queue) celery_setup.broker.restart() @@ -51,7 +52,7 @@ def test_reducing_prefetch_count(self, celery_setup: CeleryTestSetup, expected_r def test_prefetch_count_restored(self, celery_setup: CeleryTestSetup): if isinstance(celery_setup.broker, RedisTestBroker): - pytest.xfail("Potential Bug: Redis Broker Restart is unstable") + pytest.xfail("Potential Bug with Redis Broker") expected_running_tasks_count = MAX_PREFETCH * WORKER_PREFETCH_MULTIPLIER sig = group(long_running_task.s(10) for _ in range(expected_running_tasks_count)) @@ -73,9 +74,6 @@ def default_worker_app(self, default_worker_app: Celery) -> Celery: return app def test_max_prefetch_passed_on_broker_restart(self, celery_setup: CeleryTestSetup): - if isinstance(celery_setup.broker, RedisTestBroker): - pytest.xfail("Real Bug: Broker does not fetch messages after restart") - sig = group(long_running_task.s(420) for _ in range(WORKER_CONCURRENCY)) sig.apply_async(queue=celery_setup.worker.worker_queue) celery_setup.broker.restart() @@ -94,9 +92,6 @@ def default_worker_app(self, default_worker_app: Celery) -> Celery: return app def test_max_prefetch_not_passed_on_broker_restart(self, celery_setup: CeleryTestSetup): - if isinstance(celery_setup.broker, RedisTestBroker): - pytest.xfail("Real Bug: Broker does not fetch messages after restart") - sig = group(long_running_task.s(10) for _ in range(WORKER_CONCURRENCY)) r = sig.apply_async(queue=celery_setup.worker.worker_queue) celery_setup.broker.restart() @@ -104,3 +99,33 @@ def test_max_prefetch_not_passed_on_broker_restart(self, celery_setup: CeleryTes assert "Task t.smoke.tasks.noop" not in celery_setup.worker.logs() r.get(timeout=RESULT_TIMEOUT) assert "Task t.smoke.tasks.noop" in celery_setup.worker.logs() + + +class test_consumer: + def test_worker_consume_tasks_after_redis_broker_restart( + self, + celery_setup: CeleryTestSetup, + ): + queue = celery_setup.worker.worker_queue + assert noop.s().apply_async(queue=queue).get(timeout=RESULT_TIMEOUT) is None + celery_setup.broker.kill() + celery_setup.worker.wait_for_log("Trying again in 
8.00 seconds... (4/100)") + celery_setup.broker.restart() + + count = 5 + assert ( + group(noop.s() for _ in range(count)) + .apply_async(queue=queue) + .get(timeout=RESULT_TIMEOUT) + == [None] * count + ) + + assert ( + chain( + group(noop.si() for _ in range(count)), + group(noop.si() for _ in range(count)), + ) + .apply_async(queue=queue) + .get(timeout=RESULT_TIMEOUT) + == [None] * count + ) From ad4906599e701cc27307716e81998ea80a0b5eef Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 17 Jan 2024 17:03:20 +0200 Subject: [PATCH 0630/1051] Bugfix: Missing id on chain (#8798) * Inherit the lask task id of a chain into the chain itself * Added unit tests * Added integration tests * Added smoke tests * Added documentation in the userguide --- celery/canvas.py | 1 + docs/userguide/canvas.rst | 7 +++++++ t/integration/test_canvas.py | 7 +++++++ t/smoke/tests/test_canvas.py | 23 ++++++++++++++++++++++- t/unit/tasks/test_canvas.py | 7 +++++++ 5 files changed, 44 insertions(+), 1 deletion(-) diff --git a/celery/canvas.py b/celery/canvas.py index a32d3eea7e7..469d3ee99fb 100644 --- a/celery/canvas.py +++ b/celery/canvas.py @@ -1261,6 +1261,7 @@ def prepare_steps(self, args, kwargs, tasks, while node.parent: node = node.parent prev_res = node + self.id = last_task_id return tasks, results def apply(self, args=None, kwargs=None, **options): diff --git a/docs/userguide/canvas.rst b/docs/userguide/canvas.rst index b87dabca17c..58e8dbd8c12 100644 --- a/docs/userguide/canvas.rst +++ b/docs/userguide/canvas.rst @@ -614,6 +614,13 @@ Chains can also be made using the ``|`` (pipe) operator: >>> (add.s(2, 2) | mul.s(8) | mul.s(10)).apply_async() +Task ID +~~~~~~~ + +.. versionadded:: 5.4 + +A chain will inherit the task id of the last task in the chain. + Graphs ~~~~~~ diff --git a/t/integration/test_canvas.py b/t/integration/test_canvas.py index b5f88016f82..7c78a98148b 100644 --- a/t/integration/test_canvas.py +++ b/t/integration/test_canvas.py @@ -1030,6 +1030,13 @@ def test_chaining_upgraded_chords_mixed_canvas(self, manager, subtests): # Cleanup redis_connection.delete(redis_key, 'Done') + def test_freezing_chain_sets_id_of_last_task(self, manager): + last_task = add.s(2).set(task_id='42') + c = add.s(4) | last_task + assert c.id is None + c.freeze(last_task.id) + assert c.id == last_task.id + class test_result_set: diff --git a/t/smoke/tests/test_canvas.py b/t/smoke/tests/test_canvas.py index 2a235da5665..7ecf838af90 100644 --- a/t/smoke/tests/test_canvas.py +++ b/t/smoke/tests/test_canvas.py @@ -1,7 +1,8 @@ +import pytest from pytest_celery import RESULT_TIMEOUT, CeleryTestSetup from celery.canvas import chain, chord, group, signature -from t.integration.tasks import add, identity +from t.integration.tasks import ExpectedException, add, fail, identity class test_signature: @@ -31,6 +32,26 @@ def test_sanity(self, celery_setup: CeleryTestSetup): res = sig.apply_async() assert res.get(timeout=RESULT_TIMEOUT) == "test_chain" + def test_chain_gets_last_task_id_with_failing_tasks_in_chain(self, celery_setup: CeleryTestSetup): + """https://github.com/celery/celery/issues/8786""" + queue = celery_setup.worker.worker_queue + sig = chain( + identity.si("start").set(queue=queue), + group( + identity.si("a").set(queue=queue), + fail.si().set(queue=queue), + ), + identity.si("break").set(queue=queue), + identity.si("end").set(queue=queue), + ) + res = sig.apply_async() + celery_setup.worker.assert_log_does_not_exist( + "ValueError: task_id must not be empty. Got None instead." 
+ ) + + with pytest.raises(ExpectedException): + res.get(timeout=RESULT_TIMEOUT) + class test_chord: def test_sanity(self, celery_setup: CeleryTestSetup): diff --git a/t/unit/tasks/test_canvas.py b/t/unit/tasks/test_canvas.py index 53dc52e5cbb..a90d203e234 100644 --- a/t/unit/tasks/test_canvas.py +++ b/t/unit/tasks/test_canvas.py @@ -476,6 +476,13 @@ def test_groups_in_chain_to_chord(self): c = g1 | g2 assert isinstance(c, chord) + def test_prepare_steps_set_last_task_id_to_chain(self): + last_task = self.add.s(2).set(task_id='42') + c = self.add.s(4) | last_task + assert c.id is None + tasks, _ = c.prepare_steps((), {}, c.tasks, last_task_id=last_task.id) + assert c.id == last_task.id + def test_group_to_chord(self): c = ( self.add.s(5) | From b02874bbeb5d5aa701f554febe33d543a9534ee7 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 17 Jan 2024 19:41:07 +0200 Subject: [PATCH 0631/1051] Prepare for (pre) release: v5.4.0rc1 (#8800) * Moved whatsnew-5.3.rst to history folder * Fixed formatting in Changelog for v5.3.4, v5.3.5 * Fixed "WARNING: toctree contains reference to nonexisting document whatsnew-5.2" * Added changelog for v5.4.0rc1 --- Changelog.rst | 210 +++++++++++++++------------- docs/history/index.rst | 1 + docs/{ => history}/whatsnew-5.3.rst | 0 docs/index.rst | 1 - 4 files changed, 114 insertions(+), 98 deletions(-) rename docs/{ => history}/whatsnew-5.3.rst (100%) diff --git a/Changelog.rst b/Changelog.rst index 6904989625a..35a0fff71b4 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -8,6 +8,54 @@ This document contains change notes for bugfix & new features in the main branch & 5.3.x series, please see :ref:`whatsnew-5.3` for an overview of what's new in Celery 5.3. +.. _version-5.4.0rc1: + +5.4.0rc1 +======== + +:release-date: 2024-01-17 7:00 P.M GMT+2 +:release-by: Tomer Nosrati + +Celery v5.4 continues our effort to provide improved stability in production +environments. The release candidate version is available for testing. +The official release is planned for March-April 2024. + +- New Config: worker_enable_prefetch_count_reduction (#8581) +- Added "Serverless" section to Redis doc (redis.rst) (#8640) +- Upstash's Celery example repo link fix (#8665) +- Update mypy version (#8679) +- Update cryptography dependency to 41.0.7 (#8690) +- Add type annotations to celery/utils/nodenames.py (#8667) +- Issue 3426. Adding myself to the contributors. (#8696) +- Bump actions/setup-python from 4 to 5 (#8701) +- Fixed bug where chord.link_error() throws an exception on a dict type errback object (#8702) +- Bump github/codeql-action from 2 to 3 (#8725) +- Fixed multiprocessing integration tests not running on Mac (#8727) +- Added make docker-docs (#8729) +- Fix DeprecationWarning: datetime.datetime.utcnow() (#8726) +- Remove `new` adjective in docs (#8743) +- add type annotation to celery/utils/sysinfo.py (#8747) +- add type annotation to celery/utils/iso8601.py (#8750) +- Change type annotation to celery/utils/iso8601.py (#8752) +- Update test deps (#8754) +- Mark flaky: test_asyncresult_get_cancels_subscription() (#8757) +- change _read_as_base64 (b64encode returns bytes) on celery/utils/term.py (#8759) +- Replace string concatenation with fstring on celery/utils/term.py (#8760) +- Add type annotation to celery/utils/term.py (#8755) +- Skipping test_tasks::test_task_accepted (#8761) +- Updated concurrency docs page. 
(#8753)
+- Changed pyup -> dependabot for updating dependencies (#8764)
+- Bump isort from 5.12.0 to 5.13.2 (#8772)
+- Update elasticsearch requirement from <=8.11.0 to <=8.11.1 (#8775)
+- Bump sphinx-click from 4.4.0 to 5.1.0 (#8774)
+- Bump python-memcached from 1.59 to 1.61 (#8776)
+- Update elastic-transport requirement from <=8.10.0 to <=8.11.0 (#8780)
+- python-memcached==1.61 -> python-memcached>=1.61 (#8787)
+- Remove usage of utcnow (#8791)
+- Smoke Tests (#8793)
+- Moved smoke tests to their own workflow (#8797)
+- Bugfix: Worker not consuming tasks after Redis broker restart (#8796)
+- Bugfix: Missing id on chain (#8798)

 .. _version-5.3.6:

@@ -17,26 +65,17 @@ an overview of what's new in Celery 5.3.
 :release-date: 2023-11-22 9:15 P.M GMT+6
 :release-by: Asif Saif Uddin

-
 This release is focused mainly on fixing AWS SQS new feature compatibility issues
 and old regressions. The code changes are mostly fixes for regressions. More
 details can be found below.

-What's Changed
-==============
-- Increased docker-build CI job timeout from 30m -> 60m by @Nusnus in https://github.com/celery/celery/pull/8635
-- Incredibly minor spelling fix. by @Asday in https://github.com/celery/celery/pull/8649
-- Fix non-zero exit code when receiving remote shutdown by @lyzlisa in https://github.com/celery/celery/pull/8650
-- Update task.py get_custom_headers missing 'compression' key by @auvipy in https://github.com/celery/celery/pull/8633
-- Update kombu>=5.3.4 to fix SQS request compatibility with boto JSON serializer by @auvipy in https://github.com/celery/celery/pull/8646
-- test requirements version update by @auvipy in https://github.com/celery/celery/pull/8655
-- Update elasticsearch version by @auvipy in https://github.com/celery/celery/pull/8656
-- Propagates more ImportErrors during autodiscovery by @johnjameswhitman in https://github.com/celery/celery/pull/8632
-New Contributors
-================
-- @Asday made their first contribution in https://github.com/celery/celery/pull/8649
-- @lyzlisa made their first contribution in https://github.com/celery/celery/pull/8650
-- @johnjameswhitman made their first contribution in https://github.com/celery/celery/pull/8632
+- Increased docker-build CI job timeout from 30m -> 60m (#8635)
+- Incredibly minor spelling fix. (#8649)
+- Fix non-zero exit code when receiving remote shutdown (#8650)
+- Update task.py get_custom_headers missing 'compression' key (#8633)
+- Update kombu>=5.3.4 to fix SQS request compatibility with boto JSON serializer (#8646)
+- test requirements version update (#8655)
+- Update elasticsearch version (#8656)
+- Propagates more ImportErrors during autodiscovery (#8632)

 ..
_version-5.3.5: @@ -47,86 +86,63 @@ New Contributors :release-date: 2023-11-10 7:15 P.M GMT+6 :release-by: Asif Saif Uddin - -What's Changed -============== -- Update test.txt versions by @auvipy in https://github.com/celery/celery/pull/8481 -- fix os.getcwd() FileNotFoundError by @mortimer2015 in https://github.com/celery/celery/pull/8448 -- Fix typo in CONTRIBUTING.rst by @monteiro-renato in https://github.com/celery/celery/pull/8494 -- typo(doc): configuration.rst by @shifenhutu in https://github.com/celery/celery/pull/8484 -- assert before raise by @monteiro-renato in https://github.com/celery/celery/pull/8495 -- Update GHA checkout version by @auvipy in https://github.com/celery/celery/pull/8496 -- Fixed replaced_task_nesting by @Nusnus in https://github.com/celery/celery/pull/8500 -- Fix code indentation for route_task() example by @stefmolin in https://github.com/celery/celery/pull/8502 -- support redis 5.x by @dulmandakh in https://github.com/celery/celery/pull/8504 -- Fix typos in test_canvas.py by @monteiro-renato in https://github.com/celery/celery/pull/8498 -- Marked flaky tests by @Nusnus in https://github.com/celery/celery/pull/8508 -- Fix typos in calling.rst by @visitorckw in https://github.com/celery/celery/pull/8506 -- Added support for replaced_task_nesting in chains by @Nusnus in https://github.com/celery/celery/pull/8501 -- Fix typos in canvas.rst by @visitorckw in https://github.com/celery/celery/pull/8509 -- Patch Version Release Checklist by @Nusnus in https://github.com/celery/celery/pull/8488 -- Added Python 3.11 support to Dockerfile by @Nusnus in https://github.com/celery/celery/pull/8511 -- Dependabot (Celery) by @Nusnus in https://github.com/celery/celery/pull/8510 -- Bump actions/checkout from 3 to 4 by @dependabot in https://github.com/celery/celery/pull/8512 -- Update ETA example to include timezone by @amantri in https://github.com/celery/celery/pull/8516 -- Replaces datetime.fromisoformat with the more lenient dateutil parser by @stumpylog in https://github.com/celery/celery/pull/8507 -- Fixed indentation in Dockerfile for Python 3.11 by @Nusnus in https://github.com/celery/celery/pull/8527 -- Fix git bug in Dockerfile by @Nusnus in https://github.com/celery/celery/pull/8528 -- Tox lint upgrade from Python 3.9 to Python 3.11 by @Nusnus in https://github.com/celery/celery/pull/8526 -- Document gevent concurrency by @cunla in https://github.com/celery/celery/pull/8520 -- Update test.txt by @auvipy in https://github.com/celery/celery/pull/8530 -- Celery Docker Upgrades by @Nusnus in https://github.com/celery/celery/pull/8531 -- pyupgrade upgrade v3.11.0 -> v3.13.0 by @Nusnus in https://github.com/celery/celery/pull/8535 -- Update msgpack.txt by @auvipy in https://github.com/celery/celery/pull/8548 -- Update auth.txt by @auvipy in https://github.com/celery/celery/pull/8547 -- Update msgpack.txt to fix build issues by @auvipy in https://github.com/celery/celery/pull/8552 -- Basic ElasticSearch / ElasticClient 8.x Support by @q2justin in https://github.com/celery/celery/pull/8519 -- Fix eager tasks does not populate name field by @KOliver94 in https://github.com/celery/celery/pull/8486 -- Fix typo in celery.app.control by @Spaceface16518 in https://github.com/celery/celery/pull/8563 -- Update solar.txt ephem by @auvipy in https://github.com/celery/celery/pull/8566 -- Update test.txt pytest-timeout by @auvipy in https://github.com/celery/celery/pull/8565 -- Correct some mypy errors by @rbtcollins in https://github.com/celery/celery/pull/8570 -- Update 
elasticsearch.txt by @auvipy in https://github.com/celery/celery/pull/8573 -- Update test.txt deps by @auvipy in https://github.com/celery/celery/pull/8574 -- Update test.txt by @auvipy in https://github.com/celery/celery/pull/8590 -- Improved the "Next steps" documentation (#8561). by @frolenkov-nikita in https://github.com/celery/celery/pull/8600 -- Disabled couchbase tests due to broken package breaking main by @Nusnus in https://github.com/celery/celery/pull/8602 -- Update elasticsearch deps by @auvipy in https://github.com/celery/celery/pull/8605 -- Update cryptography==41.0.5 by @auvipy in https://github.com/celery/celery/pull/8604 -- Update pytest==7.4.3 by @auvipy in https://github.com/celery/celery/pull/8606 -- test initial support of python 3.12.x by @auvipy in https://github.com/celery/celery/pull/8549 -- updated new versions to fix CI by @auvipy in https://github.com/celery/celery/pull/8607 -- Update zstd.txt by @auvipy in https://github.com/celery/celery/pull/8609 -- Fixed CI Support with Python 3.12 by @Nusnus in https://github.com/celery/celery/pull/8611 -- updated CI, docs and classifier for next release by @auvipy in https://github.com/celery/celery/pull/8613 -- updated dockerfile to add python 3.12 by @auvipy in https://github.com/celery/celery/pull/8614 -- lint,mypy,docker-unit-tests -> Python 3.12 by @Nusnus in https://github.com/celery/celery/pull/8617 -- Correct type of `request` in `task_revoked` documentation by @RJPercival in https://github.com/celery/celery/pull/8616 -- update docs docker image by @auvipy in https://github.com/celery/celery/pull/8618 -- Fixed RecursionError caused by giving `config_from_object` nested mod… by @frolenkov-nikita in https://github.com/celery/celery/pull/8619 -- Fix: serialization error when gossip working by @kitsuyui in https://github.com/celery/celery/pull/6566 -* [documentation] broker_connection_max_retries of 0 does not mean "retry forever" by @jakila in https://github.com/celery/celery/pull/8626 -- added 2 debian package for better stability in Docker by @auvipy in https://github.com/celery/celery/pull/8629 - - -New Contributors -================ -- @mortimer2015 made their first contribution in https://github.com/celery/celery/pull/8448 -- @monteiro-renato made their first contribution in https://github.com/celery/celery/pull/8494 -- @shifenhutu made their first contribution in https://github.com/celery/celery/pull/8484 -- @stefmolin made their first contribution in https://github.com/celery/celery/pull/8502 -- @visitorckw made their first contribution in https://github.com/celery/celery/pull/8506 -- @dependabot made their first contribution in https://github.com/celery/celery/pull/8512 -- @amantri made their first contribution in https://github.com/celery/celery/pull/8516 -- @cunla made their first contribution in https://github.com/celery/celery/pull/8520 -- @q2justin made their first contribution in https://github.com/celery/celery/pull/8519 -- @Spaceface16518 made their first contribution in https://github.com/celery/celery/pull/8563 -- @rbtcollins made their first contribution in https://github.com/celery/celery/pull/8570 -- @frolenkov-nikita made their first contribution in https://github.com/celery/celery/pull/8600 -- @RJPercival made their first contribution in https://github.com/celery/celery/pull/8616 -- @kitsuyui made their first contribution in https://github.com/celery/celery/pull/6566 -- @jakila made their first contribution in https://github.com/celery/celery/pull/8626 - +- Update test.txt versions (#8481) +- 
fix os.getcwd() FileNotFoundError (#8448) +- Fix typo in CONTRIBUTING.rst (#8494) +- typo(doc): configuration.rst (#8484) +- assert before raise (#8495) +- Update GHA checkout version (#8496) +- Fixed replaced_task_nesting (#8500) +- Fix code indentation for route_task() example (#8502) +- support redis 5.x (#8504) +- Fix typos in test_canvas.py (#8498) +- Marked flaky tests (#8508) +- Fix typos in calling.rst (#8506) +- Added support for replaced_task_nesting in chains (#8501) +- Fix typos in canvas.rst (#8509) +- Patch Version Release Checklist (#8488) +- Added Python 3.11 support to Dockerfile (#8511) +- Dependabot (Celery) (#8510) +- Bump actions/checkout from 3 to 4 (#8512) +- Update ETA example to include timezone (#8516) +- Replaces datetime.fromisoformat with the more lenient dateutil parser (#8507) +- Fixed indentation in Dockerfile for Python 3.11 (#8527) +- Fix git bug in Dockerfile (#8528) +- Tox lint upgrade from Python 3.9 to Python 3.11 (#8526) +- Document gevent concurrency (#8520) +- Update test.txt (#8530) +- Celery Docker Upgrades (#8531) +- pyupgrade upgrade v3.11.0 -> v3.13.0 (#8535) +- Update msgpack.txt (#8548) +- Update auth.txt (#8547) +- Update msgpack.txt to fix build issues (#8552) +- Basic ElasticSearch / ElasticClient 8.x Support (#8519) +- Fix eager tasks does not populate name field (#8486) +- Fix typo in celery.app.control (#8563) +- Update solar.txt ephem (#8566) +- Update test.txt pytest-timeout (#8565) +- Correct some mypy errors (#8570) +- Update elasticsearch.txt (#8573) +- Update test.txt deps (#8574) +- Update test.txt (#8590) +- Improved the "Next steps" documentation (#8561). (#8600) +- Disabled couchbase tests due to broken package breaking main (#8602) +- Update elasticsearch deps (#8605) +- Update cryptography==41.0.5 (#8604) +- Update pytest==7.4.3 (#8606) +- test initial support of python 3.12.x (#8549) +- updated new versions to fix CI (#8607) +- Update zstd.txt (#8609) +- Fixed CI Support with Python 3.12 (#8611) +- updated CI, docs and classifier for next release (#8613) +- updated dockerfile to add python 3.12 (#8614) +- lint,mypy,docker-unit-tests -> Python 3.12 (#8617) +- Correct type of `request` in `task_revoked` documentation (#8616) +- update docs docker image (#8618) +- Fixed RecursionError caused by giving `config_from_object` nested mod… (#8619) +- Fix: serialization error when gossip working (#6566) +- [documentation] broker_connection_max_retries of 0 does not mean "retry forever" (#8626) +- added 2 debian package for better stability in Docker (#8629) .. _version-5.3.4: diff --git a/docs/history/index.rst b/docs/history/index.rst index 35423550084..b0c39767826 100644 --- a/docs/history/index.rst +++ b/docs/history/index.rst @@ -13,6 +13,7 @@ version please visit :ref:`changelog`. .. 
toctree:: :maxdepth: 2 + whatsnew-5.3 whatsnew-5.1 changelog-5.1 whatsnew-5.0 diff --git a/docs/whatsnew-5.3.rst b/docs/history/whatsnew-5.3.rst similarity index 100% rename from docs/whatsnew-5.3.rst rename to docs/history/whatsnew-5.3.rst diff --git a/docs/index.rst b/docs/index.rst index 915b7c088aa..299fb5749f2 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -58,7 +58,6 @@ Contents tutorials/index faq changelog - whatsnew-5.2 reference/index internals/index history/index From 5d97edc0ed34c5cf1c122f9d57552f8dac419766 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 17 Jan 2024 19:47:27 +0200 Subject: [PATCH 0632/1051] =?UTF-8?q?Bump=20version:=205.3.6=20=E2=86=92?= =?UTF-8?q?=205.4.0rc1?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.cfg | 2 +- README.rst | 2 +- celery/__init__.py | 4 ++-- docs/includes/introduction.txt | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 412d6ea69b4..f82cfbd7d53 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 5.3.6 +current_version = 5.4.0rc1 commit = True tag = True parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P[a-z\d]+)? diff --git a/README.rst b/README.rst index 7a2b2411f37..e206ec30140 100644 --- a/README.rst +++ b/README.rst @@ -2,7 +2,7 @@ |build-status| |coverage| |license| |wheel| |semgrep| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge| -:Version: 5.3.6 (emerald-rush) +:Version: 5.4.0rc1 (opalescent) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ diff --git a/celery/__init__.py b/celery/__init__.py index c60dbd4fe58..7212e277efc 100644 --- a/celery/__init__.py +++ b/celery/__init__.py @@ -15,9 +15,9 @@ # Lazy loading from . 
import local

-SERIES = 'emerald-rush'
+SERIES = 'opalescent'

-__version__ = '5.3.6'
+__version__ = '5.4.0rc1'
 __author__ = 'Ask Solem'
 __contact__ = 'auvipy@gmail.com'
 __homepage__ = 'https://docs.celeryq.dev/'
diff --git a/docs/includes/introduction.txt b/docs/includes/introduction.txt
index 79eb36eeb34..e3df2ded029 100644
--- a/docs/includes/introduction.txt
+++ b/docs/includes/introduction.txt
@@ -1,4 +1,4 @@
-:Version: 5.3.6 (emerald-rush)
+:Version: 5.4.0rc1 (opalescent)
 :Web: https://docs.celeryq.dev/en/stable/index.html
 :Download: https://pypi.org/project/celery/
 :Source: https://github.com/celery/celery/

From 2576e83dcba0edb06e8a4b5027b1fcb586972050 Mon Sep 17 00:00:00 2001
From: Axel H
Date: Thu, 18 Jan 2024 00:27:27 +0100
Subject: [PATCH 0633/1051] feat(daemon): allows daemonization options to be fetched from app settings (#8553)

* feat(daemon): allows daemonization options to be fetched from app settings

* Update docs/userguide/configuration.rst

Co-authored-by: Omer Katz

* Update docs/userguide/configuration.rst

Co-authored-by: Omer Katz

* Update docs/userguide/configuration.rst

Co-authored-by: Omer Katz

* Update docs/userguide/configuration.rst

Co-authored-by: Omer Katz

* Update docs/userguide/configuration.rst

Co-authored-by: Omer Katz

* Update docs/userguide/configuration.rst

Co-authored-by: Omer Katz

* Update docs/userguide/configuration.rst

Co-authored-by: Omer Katz

* Update docs/userguide/configuration.rst

Co-authored-by: Omer Katz

* Update docs/userguide/configuration.rst

Co-authored-by: Omer Katz

* Update docs/userguide/configuration.rst

Co-authored-by: Omer Katz

* Apply suggestions from code review

Co-authored-by: Omer Katz

* doc(configuration): add version added markers to the new daemonization settings

---------

Co-authored-by: Asif Saif Uddin
Co-authored-by: Omer Katz
Co-authored-by: Tomer Nosrati
---
 celery/bin/base.py               |  34 ++++--
 docs/userguide/configuration.rst | 199 +++++++++++++++++++++++++++++++
 t/unit/bin/proj/daemon.py        |   4 +
 t/unit/bin/proj/daemon_config.py |  22 ++++
 t/unit/bin/test_daemonization.py |  22 ++++
 5 files changed, 273 insertions(+), 8 deletions(-)
 create mode 100644 t/unit/bin/proj/daemon.py
 create mode 100644 t/unit/bin/proj/daemon_config.py
 create mode 100644 t/unit/bin/test_daemonization.py

diff --git a/celery/bin/base.py b/celery/bin/base.py
index 63a2895758b..57158a27e06 100644
--- a/celery/bin/base.py
+++ b/celery/bin/base.py
@@ -4,9 +4,10 @@
 from collections import OrderedDict
 from functools import update_wrapper
 from pprint import pformat
+from typing import Any

 import click
-from click import ParamType
+from click import Context, ParamType
 from kombu.utils.objects import cached_property

 from celery._state import get_current_app
@@ -170,19 +171,36 @@ def format_options(self, ctx, formatter):
             formatter.write_dl(opts_group)


+class DaemonOption(CeleryOption):
+    """Common daemonization option."""
+    def __init__(self, *args, **kwargs):
+        super().__init__(args,
+                         help_group=kwargs.pop("help_group", "Daemonization Options"),
+                         callback=kwargs.pop("callback", self.daemon_setting),
+                         **kwargs)
+
+    def daemon_setting(self, ctx: Context, opt: CeleryOption, value: Any) -> Any:
+        """
+        Try to fetch a daemonization option from the application settings.
+        Use the daemon command name as prefix (e.g. 
`worker` -> `worker_pidfile`)
+        """
+        return value or getattr(ctx.obj.app.conf, f"{ctx.command.name}_{self.name}", None)
+
+
 class CeleryDaemonCommand(CeleryCommand):
     """Daemon commands."""

     def __init__(self, *args, **kwargs):
         """Initialize a Celery command with common daemon options."""
         super().__init__(*args, **kwargs)
-        self.params.append(CeleryOption(('-f', '--logfile'), help_group="Daemonization Options",
-                                        help="Log destination; defaults to stderr"))
-        self.params.append(CeleryOption(('--pidfile',), help_group="Daemonization Options"))
-        self.params.append(CeleryOption(('--uid',), help_group="Daemonization Options"))
-        self.params.append(CeleryOption(('--gid',), help_group="Daemonization Options"))
-        self.params.append(CeleryOption(('--umask',), help_group="Daemonization Options"))
-        self.params.append(CeleryOption(('--executable',), help_group="Daemonization Options"))
+        self.params.extend((
+            DaemonOption("--logfile", "-f", help="Log destination; defaults to stderr"),
+            DaemonOption("--pidfile", help="PID file path; defaults to no PID file"),
+            DaemonOption("--uid", help="Drops privileges to this user ID"),
+            DaemonOption("--gid", help="Drops privileges to this group ID"),
+            DaemonOption("--umask", help="Create files and directories with this umask"),
+            DaemonOption("--executable", help="Override path to the Python executable"),
+        ))


 class CommaSeparatedList(ParamType):
diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst
index 66a4ee71606..8b0c01bcf86 100644
--- a/docs/userguide/configuration.rst
+++ b/docs/userguide/configuration.rst
@@ -3219,6 +3219,71 @@ Message serialization format used when sending event messages.

 :ref:`calling-serializers`.

+.. setting:: events_logfile
+
+``events_logfile``
+~~~~~~~~~~~~~~~~~~
+
+Default: :const:`None`
+
+An optional file path for :program:`celery events` to log into (defaults to `stdout`).
+
+.. versionadded:: 5.4
+
+.. setting:: events_pidfile
+
+``events_pidfile``
+~~~~~~~~~~~~~~~~~~
+
+Default: :const:`None`
+
+An optional file path for :program:`celery events` to create/store its PID file (defaults to no PID file created).
+
+.. versionadded:: 5.4
+
+.. setting:: events_uid
+
+``events_uid``
+~~~~~~~~~~~~~~
+
+Default: :const:`None`
+
+An optional user ID to use when the :program:`celery events` daemon drops its privileges (defaults to no UID change).
+
+.. versionadded:: 5.4
+
+.. setting:: events_gid
+
+``events_gid``
+~~~~~~~~~~~~~~
+
+Default: :const:`None`
+
+An optional group ID to use when the :program:`celery events` daemon drops its privileges (defaults to no GID change).
+
+.. versionadded:: 5.4
+
+.. setting:: events_umask
+
+``events_umask``
+~~~~~~~~~~~~~~~~
+
+Default: :const:`None`
+
+An optional `umask` to use when :program:`celery events` creates files (log, pid...) when daemonizing.
+
+.. versionadded:: 5.4
+
+.. setting:: events_executable
+
+``events_executable``
+~~~~~~~~~~~~~~~~~~~~~
+
+Default: :const:`None`
+
+An optional `python` executable path for :program:`celery events` to use when daemonizing (defaults to :data:`sys.executable`).
+
+
 .. _conf-control:

 Remote Control Commands
@@ -3487,6 +3552,74 @@ Default: ``"kombu.asynchronous.hub.timer:Timer"``.

 Name of the ETA scheduler class used by the worker.
 Default is or set by the pool implementation.

+.. setting:: worker_logfile
+
+``worker_logfile``
+~~~~~~~~~~~~~~~~~~
+
+Default: :const:`None`
+
+An optional file path for :program:`celery worker` to log into (defaults to `stdout`).
+
+.. versionadded:: 5.4
+
+.. 
setting:: worker_pidfile + +``worker_pidfile`` +~~~~~~~~~~~~~~~~~~ + +Default: :const:`None` + +An optional file path for :program:`celery worker` to create/store its PID file (defaults to no PID file created). + +.. versionadded:: 5.4 + +.. setting:: worker_uid + +``worker_uid`` +~~~~~~~~~~~~~~ + +Default: :const:`None` + +An optional user ID to use when :program:`celery worker` daemon drops its privileges (defaults to no UID change). + +.. versionadded:: 5.4 + +.. setting:: worker_gid + +``worker_gid`` +~~~~~~~~~~~~~~ + +Default: :const:`None` + +An optional group ID to use when :program:`celery worker` daemon drops its privileges (defaults to no GID change). + +.. versionadded:: 5.4 + +.. setting:: worker_umask + +``worker_umask`` +~~~~~~~~~~~~~~~~ + +Default: :const:`None` + +An optional `umask` to use when :program:`celery worker` creates files (log, pid...) when daemonizing. + +.. versionadded:: 5.4 + +.. setting:: worker_executable + +``worker_executable`` +~~~~~~~~~~~~~~~~~~~~~ + +Default: :const:`None` + +An optional `python` executable path for :program:`celery worker` to use when deaemonizing (defaults to :data:`sys.executable`). + +.. versionadded:: 5.4 + + + .. _conf-celerybeat: Beat Settings (:program:`celery beat`) @@ -3573,3 +3706,69 @@ Default: None. When using cron, the number of seconds :mod:`~celery.bin.beat` can look back when deciding whether a cron schedule is due. When set to `None`, cronjobs that are past due will always run immediately. + +.. setting:: beat_logfile + +``beat_logfile`` +~~~~~~~~~~~~~~~~ + +Default: :const:`None` + +An optional file path for :program:`celery beat` to log into (defaults to `stdout`). + +.. versionadded:: 5.4 + +.. setting:: beat_pidfile + +``beat_pidfile`` +~~~~~~~~~~~~~~~~ + +Default: :const:`None` + +An optional file path for :program:`celery beat` to create/store it PID file (defaults to no PID file created). + +.. versionadded:: 5.4 + +.. setting:: beat_uid + +``beat_uid`` +~~~~~~~~~~~~ + +Default: :const:`None` + +An optional user ID to use when beat :program:`celery beat` drops its privileges (defaults to no UID change). + +.. versionadded:: 5.4 + +.. setting:: beat_gid + +``beat_gid`` +~~~~~~~~~~~~ + +Default: :const:`None` + +An optional group ID to use when :program:`celery beat` daemon drops its privileges (defaults to no GID change). + +.. versionadded:: 5.4 + +.. setting:: beat_umask + +``beat_umask`` +~~~~~~~~~~~~~~ + +Default: :const:`None` + +An optional `umask` to use when :program:`celery beat` creates files (log, pid...) when daemonizing. + +.. versionadded:: 5.4 + +.. setting:: beat_executable + +``beat_executable`` +~~~~~~~~~~~~~~~~~~~ + +Default: :const:`None` + +An optional `python` executable path for :program:`celery beat` to use when deaemonizing (defaults to :data:`sys.executable`). + +.. 
versionadded:: 5.4 diff --git a/t/unit/bin/proj/daemon.py b/t/unit/bin/proj/daemon.py new file mode 100644 index 00000000000..82c642a5f95 --- /dev/null +++ b/t/unit/bin/proj/daemon.py @@ -0,0 +1,4 @@ +from celery import Celery + +app = Celery(set_as_current=False) +app.config_from_object("t.unit.bin.proj.daemon_config") diff --git a/t/unit/bin/proj/daemon_config.py b/t/unit/bin/proj/daemon_config.py new file mode 100644 index 00000000000..e0b6d151ce7 --- /dev/null +++ b/t/unit/bin/proj/daemon_config.py @@ -0,0 +1,22 @@ +# Test config for t/unit/bin/test_deamonization.py + +beat_pidfile = "/tmp/beat.test.pid" +beat_logfile = "/tmp/beat.test.log" +beat_uid = 42 +beat_gid = 4242 +beat_umask = 0o777 +beat_executable = "/beat/bin/python" + +events_pidfile = "/tmp/events.test.pid" +events_logfile = "/tmp/events.test.log" +events_uid = 42 +events_gid = 4242 +events_umask = 0o777 +events_executable = "/events/bin/python" + +worker_pidfile = "/tmp/worker.test.pid" +worker_logfile = "/tmp/worker.test.log" +worker_uid = 42 +worker_gid = 4242 +worker_umask = 0o777 +worker_executable = "/worker/bin/python" diff --git a/t/unit/bin/test_daemonization.py b/t/unit/bin/test_daemonization.py new file mode 100644 index 00000000000..9bd2be79beb --- /dev/null +++ b/t/unit/bin/test_daemonization.py @@ -0,0 +1,22 @@ +from __future__ import annotations + +from unittest.mock import patch + +import pytest +from click.testing import CliRunner + +from celery.bin.celery import celery + +from .proj import daemon_config as config + + +@pytest.mark.usefixtures('depends_on_current_app') +@pytest.mark.parametrize("daemon", ["worker", "beat", "events"]) +def test_daemon_options_from_config(daemon: str, cli_runner: CliRunner): + + with patch(f"celery.bin.{daemon}.{daemon}.callback") as mock: + cli_runner.invoke(celery, f"-A t.unit.bin.proj.daemon {daemon}") + + mock.assert_called_once() + for param in "logfile", "pidfile", "uid", "gid", "umask", "executable": + assert mock.call_args.kwargs[param] == getattr(config, f"{daemon}_{param}") From 5b9c7d18d205b4fe02b609c308b3c906f0eb0796 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 18 Jan 2024 01:36:14 +0200 Subject: [PATCH 0634/1051] Fixed version documentation tag from #8553 in configuration.rst (#8802) --- docs/userguide/configuration.rst | 72 ++++++++++++++++---------------- 1 file changed, 36 insertions(+), 36 deletions(-) diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst index 8b0c01bcf86..2825c58434a 100644 --- a/docs/userguide/configuration.rst +++ b/docs/userguide/configuration.rst @@ -3224,61 +3224,63 @@ Message serialization format used when sending event messages. ``events_logfile`` ~~~~~~~~~~~~~~~~~~ +.. versionadded:: 5.4 + Default: :const:`None` An optional file path for :program:`celery events` to log into (defaults to `stdout`). -.. versionadded:: 5.4 - .. setting:: events_pidfile ``events_pidfile`` ~~~~~~~~~~~~~~~~~~ +.. versionadded:: 5.4 + Default: :const:`None` An optional file path for :program:`celery events` to create/store its PID file (default to no PID file created). -.. versionadded:: 5.4 - .. setting:: events_uid ``events_uid`` ~~~~~~~~~~~~~~ +.. versionadded:: 5.4 + Default: :const:`None` An optional user ID to use when events :program:`celery events` drops its privileges (defaults to no UID change). -.. versionadded:: 5.4 - .. setting:: events_gid ``events_gid`` ~~~~~~~~~~~~~~ +.. 
versionadded:: 5.4 + Default: :const:`None` An optional group ID to use when :program:`celery events` daemon drops its privileges (defaults to no GID change). -.. versionadded:: 5.4 - .. setting:: events_umask ``events_umask`` ~~~~~~~~~~~~~~~~ +.. versionadded:: 5.4 + Default: :const:`None` An optional `umask` to use when :program:`celery events` creates files (log, pid...) when daemonizing. -.. versionadded:: 5.4 - .. setting:: events_executable ``events_executable`` ~~~~~~~~~~~~~~~~~~~~~ +.. versionadded:: 5.4 + Default: :const:`None` An optional `python` executable path for :program:`celery events` to use when deaemonizing (defaults to :data:`sys.executable`). @@ -3557,68 +3559,66 @@ Default is or set by the pool implementation. ``worker_logfile`` ~~~~~~~~~~~~~~~~~~ +.. versionadded:: 5.4 + Default: :const:`None` An optional file path for :program:`celery worker` to log into (defaults to `stdout`). -.. versionadded:: 5.4 - .. setting:: worker_pidfile ``worker_pidfile`` ~~~~~~~~~~~~~~~~~~ +.. versionadded:: 5.4 + Default: :const:`None` An optional file path for :program:`celery worker` to create/store its PID file (defaults to no PID file created). -.. versionadded:: 5.4 - .. setting:: worker_uid ``worker_uid`` ~~~~~~~~~~~~~~ +.. versionadded:: 5.4 + Default: :const:`None` An optional user ID to use when :program:`celery worker` daemon drops its privileges (defaults to no UID change). -.. versionadded:: 5.4 - .. setting:: worker_gid ``worker_gid`` ~~~~~~~~~~~~~~ +.. versionadded:: 5.4 + Default: :const:`None` An optional group ID to use when :program:`celery worker` daemon drops its privileges (defaults to no GID change). -.. versionadded:: 5.4 - .. setting:: worker_umask ``worker_umask`` ~~~~~~~~~~~~~~~~ +.. versionadded:: 5.4 + Default: :const:`None` An optional `umask` to use when :program:`celery worker` creates files (log, pid...) when daemonizing. -.. versionadded:: 5.4 - .. setting:: worker_executable ``worker_executable`` ~~~~~~~~~~~~~~~~~~~~~ -Default: :const:`None` - -An optional `python` executable path for :program:`celery worker` to use when deaemonizing (defaults to :data:`sys.executable`). - .. versionadded:: 5.4 +Default: :const:`None` +An optional `python` executable path for :program:`celery worker` to use when deaemonizing (defaults to :data:`sys.executable`). .. _conf-celerybeat: @@ -3712,63 +3712,63 @@ are past due will always run immediately. ``beat_logfile`` ~~~~~~~~~~~~~~~~ +.. versionadded:: 5.4 + Default: :const:`None` An optional file path for :program:`celery beat` to log into (defaults to `stdout`). -.. versionadded:: 5.4 - .. setting:: beat_pidfile ``beat_pidfile`` ~~~~~~~~~~~~~~~~ +.. versionadded:: 5.4 + Default: :const:`None` An optional file path for :program:`celery beat` to create/store it PID file (defaults to no PID file created). -.. versionadded:: 5.4 - .. setting:: beat_uid ``beat_uid`` ~~~~~~~~~~~~ +.. versionadded:: 5.4 + Default: :const:`None` An optional user ID to use when beat :program:`celery beat` drops its privileges (defaults to no UID change). -.. versionadded:: 5.4 - .. setting:: beat_gid ``beat_gid`` ~~~~~~~~~~~~ +.. versionadded:: 5.4 + Default: :const:`None` An optional group ID to use when :program:`celery beat` daemon drops its privileges (defaults to no GID change). -.. versionadded:: 5.4 - .. setting:: beat_umask ``beat_umask`` ~~~~~~~~~~~~~~ +.. versionadded:: 5.4 + Default: :const:`None` An optional `umask` to use when :program:`celery beat` creates files (log, pid...) when daemonizing. -.. versionadded:: 5.4 - .. 
setting:: beat_executable ``beat_executable`` ~~~~~~~~~~~~~~~~~~~ +.. versionadded:: 5.4 + Default: :const:`None` An optional `python` executable path for :program:`celery beat` to use when deaemonizing (defaults to :data:`sys.executable`). - -.. versionadded:: 5.4 From d7700e259d89efbfb432e429ef89404b8328b261 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 18 Jan 2024 03:40:15 +0200 Subject: [PATCH 0635/1051] Upgraded Sphinx from v5.x.x to v7.x.x (#8803) --- requirements/docs.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/docs.txt b/requirements/docs.txt index 2596004d021..d4d43fb27c2 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -1,5 +1,5 @@ -sphinx_celery>=2.0.0 -Sphinx==5.3.0 +sphinx_celery>=2.1.1 +Sphinx>=7.0.0 sphinx-testing~=1.0.1 sphinx-click==5.1.0 -r extras/sqlalchemy.txt From 4a3930249aea8f72e62ce8fc97ae00d54f8ed2c1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 20 Jan 2024 01:39:30 +0200 Subject: [PATCH 0636/1051] Update elasticsearch requirement from <=8.11.1 to <=8.12.0 (#8810) Updates the requirements on [elasticsearch](https://github.com/elastic/elasticsearch-py) to permit the latest version. - [Release notes](https://github.com/elastic/elasticsearch-py/releases) - [Commits](https://github.com/elastic/elasticsearch-py/compare/0.4.1...v8.12.0) --- updated-dependencies: - dependency-name: elasticsearch dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/extras/elasticsearch.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/elasticsearch.txt b/requirements/extras/elasticsearch.txt index 696c6ce76cc..7c08aef8179 100644 --- a/requirements/extras/elasticsearch.txt +++ b/requirements/extras/elasticsearch.txt @@ -1,2 +1,2 @@ -elasticsearch<=8.11.1 +elasticsearch<=8.12.0 elastic-transport<=8.11.0 From 8f389997887232500d4aa1a2b0ae0c7320c4c84a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 20 Jan 2024 01:42:13 +0200 Subject: [PATCH 0637/1051] Update elastic-transport requirement from <=8.11.0 to <=8.12.0 (#8811) Updates the requirements on [elastic-transport](https://github.com/elastic/elastic-transport-python) to permit the latest version. - [Release notes](https://github.com/elastic/elastic-transport-python/releases) - [Changelog](https://github.com/elastic/elastic-transport-python/blob/main/CHANGELOG.md) - [Commits](https://github.com/elastic/elastic-transport-python/compare/0.1.0b0...v8.12.0) --- updated-dependencies: - dependency-name: elastic-transport dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/extras/elasticsearch.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/elasticsearch.txt b/requirements/extras/elasticsearch.txt index 7c08aef8179..39417c6d221 100644 --- a/requirements/extras/elasticsearch.txt +++ b/requirements/extras/elasticsearch.txt @@ -1,2 +1,2 @@ elasticsearch<=8.12.0 -elastic-transport<=8.11.0 +elastic-transport<=8.12.0 From 939f7b9cf4c6280382735a8422e7d2f2f3258c1f Mon Sep 17 00:00:00 2001 From: pyup-bot Date: Tue, 23 Jan 2024 04:21:17 +0200 Subject: [PATCH 0638/1051] Update cryptography from 41.0.7 to 42.0.0 --- requirements/extras/auth.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/auth.txt b/requirements/extras/auth.txt index ab817dd3527..c432c23341b 100644 --- a/requirements/extras/auth.txt +++ b/requirements/extras/auth.txt @@ -1 +1 @@ -cryptography==41.0.7 +cryptography==42.0.0 From 3e98049be9e0603c0f2065970848d14d47dcbb82 Mon Sep 17 00:00:00 2001 From: Andy Zickler Date: Tue, 23 Jan 2024 16:08:08 -0500 Subject: [PATCH 0639/1051] Catch UnicodeDecodeError or TypeError when opening beat schedule db (#8806) There is existing code to detect if celerybeat-schedule.db is corrupted and recreate it, however sometimes a UnicodeDecodeError or TypeError is thrown in the process of throwing the KeyError. This catches that error and allows Beat to use the existing code to recreate the database. (Fixes #2907) --- CONTRIBUTORS.txt | 3 ++- celery/beat.py | 4 ++-- t/unit/app/test_beat.py | 34 +++++++++++++++++++++++++++++++++- 3 files changed, 37 insertions(+), 4 deletions(-) diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index e0a8394bc6f..6159effcc3a 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -295,4 +295,5 @@ JoonHwan Kim, 2022/08/01 Kaustav Banerjee, 2022/11/10 Austin Snoeyink 2022/12/06 Jeremy Z. 
Othieno 2023/07/27 -Tomer Nosrati, 2022/17/07 \ No newline at end of file +Tomer Nosrati, 2022/17/07 +Andy Zickler, 2024/01/18 \ No newline at end of file diff --git a/celery/beat.py b/celery/beat.py index 76e44721e14..9656493ecbe 100644 --- a/celery/beat.py +++ b/celery/beat.py @@ -568,11 +568,11 @@ def _create_schedule(self): for _ in (1, 2): try: self._store['entries'] - except KeyError: + except (KeyError, UnicodeDecodeError, TypeError): # new schedule db try: self._store['entries'] = {} - except KeyError as exc: + except (KeyError, UnicodeDecodeError, TypeError) as exc: self._store = self._destroy_open_corrupted_schedule(exc) continue else: diff --git a/t/unit/app/test_beat.py b/t/unit/app/test_beat.py index fa163bb931e..a95e8e41409 100644 --- a/t/unit/app/test_beat.py +++ b/t/unit/app/test_beat.py @@ -2,7 +2,7 @@ import sys from datetime import datetime, timedelta, timezone from pickle import dumps, loads -from unittest.mock import Mock, call, patch +from unittest.mock import MagicMock, Mock, call, patch import pytest @@ -669,6 +669,38 @@ def test_remove_db(self, remove): with pytest.raises(OSError): s._remove_db() + def test_create_schedule_corrupted(self): + """ + Test that any decoding errors that might happen when opening beat-schedule.db are caught + """ + s = create_persistent_scheduler()[0](app=self.app, + schedule_filename='schedule') + s._store = MagicMock() + s._destroy_open_corrupted_schedule = Mock() + s._destroy_open_corrupted_schedule.return_value = MagicMock() + + # self._store['entries'] will throw a KeyError + s._store.__getitem__.side_effect = KeyError() + # then, when _create_schedule tries to reset _store['entries'], throw another error + expected_error = UnicodeDecodeError("ascii", b"ordinal not in range(128)", 0, 0, "") + s._store.__setitem__.side_effect = expected_error + + s._create_schedule() + s._destroy_open_corrupted_schedule.assert_called_with(expected_error) + + def test_create_schedule_missing_entries(self): + """ + Test that if _create_schedule can't find the key "entries" in _store it will recreate it + """ + s = create_persistent_scheduler()[0](app=self.app, schedule_filename="schedule") + s._store = MagicMock() + + # self._store['entries'] will throw a KeyError + s._store.__getitem__.side_effect = TypeError() + + s._create_schedule() + s._store.__setitem__.assert_called_with("entries", {}) + def test_setup_schedule(self): s = create_persistent_scheduler()[0](app=self.app, schedule_filename='schedule') From b1c8b28fc273a2bea71c812f74c2ffa3bf9c907e Mon Sep 17 00:00:00 2001 From: pyup-bot Date: Thu, 25 Jan 2024 06:20:00 +0200 Subject: [PATCH 0640/1051] Update cryptography from 42.0.0 to 42.0.1 --- requirements/extras/auth.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/auth.txt b/requirements/extras/auth.txt index c432c23341b..d0384ae0df0 100644 --- a/requirements/extras/auth.txt +++ b/requirements/extras/auth.txt @@ -1 +1 @@ -cryptography==42.0.0 +cryptography==42.0.1 From 2b3fde49576771975ec462243f9adf296938f616 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sun, 28 Jan 2024 23:53:57 +0200 Subject: [PATCH 0641/1051] Limit moto to <5.0.0 until the breaking issues are fixed (#8820) --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index 3ada61cca64..579a73977fd 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -6,7 +6,7 @@ pytest-timeout==2.2.0 pytest-click==1.1.0 pytest-order==1.2.0 boto3>=1.26.143 
-moto>=4.1.11
+moto>=4.1.11,<5.0.0
 # typing extensions
 mypy==1.8.0; platform_python_implementation=="CPython"
 pre-commit==3.5.0

From 86895a9914853945b2bbd1f439cb37ed32c78697 Mon Sep 17 00:00:00 2001
From: Xiong Ding
Date: Mon, 29 Jan 2024 05:00:47 -0800
Subject: [PATCH 0642/1051] Enable efficient `chord` when using DynamoDB as backend store (#8783)

* test

* add unit test

* test

* revert bad test change

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
---
 celery/backends/base.py          |  2 +-
 celery/backends/dynamodb.py      | 54 ++++++++++++++++++
 docs/userguide/canvas.rst        |  4 +-
 t/unit/backends/test_dynamodb.py | 95 +++++++++++++++++++++++++++++++-
 4 files changed, 150 insertions(+), 5 deletions(-)

diff --git a/celery/backends/base.py b/celery/backends/base.py
index f7d62c3dbe4..22cdc2ebff6 100644
--- a/celery/backends/base.py
+++ b/celery/backends/base.py
@@ -1080,7 +1080,7 @@ def on_chord_part_return(self, request, state, result, **kwargs):
                 )
             finally:
                 deps.delete()
-                self.client.delete(key)
+                self.delete(key)
         else:
             self.expire(key, self.expires)

diff --git a/celery/backends/dynamodb.py b/celery/backends/dynamodb.py
index 90fbae09449..eee6f18adef 100644
--- a/celery/backends/dynamodb.py
+++ b/celery/backends/dynamodb.py
@@ -1,6 +1,7 @@
 """AWS DynamoDB result store backend."""
 from collections import namedtuple
 from time import sleep, time
+from typing import Any, Dict

 from kombu.utils.url import _parse_url as parse_url

@@ -54,11 +55,15 @@ class DynamoDBBackend(KeyValueStoreBackend):
     supports_autoexpire = True

     _key_field = DynamoDBAttribute(name='id', data_type='S')
+    # Each record has either a value field or a count field
     _value_field = DynamoDBAttribute(name='result', data_type='B')
+    _count_filed = DynamoDBAttribute(name="chord_count", data_type='N')
     _timestamp_field = DynamoDBAttribute(name='timestamp', data_type='N')
     _ttl_field = DynamoDBAttribute(name='ttl', data_type='N')
     _available_fields = None

+    implements_incr = True
+
     def __init__(self, url=None, table_name=None, *args, **kwargs):
         super().__init__(*args, **kwargs)

@@ -459,6 +464,40 @@ def _prepare_put_request(self, key, value):
         })
         return put_request

+    def _prepare_init_count_request(self, key: str) -> Dict[str, Any]:
+        """Construct the counter initialization request parameters."""
+        timestamp = time()
+        return {
+            'TableName': self.table_name,
+            'Item': {
+                self._key_field.name: {
+                    self._key_field.data_type: key
+                },
+                self._count_filed.name: {
+                    self._count_filed.data_type: "0"
+                },
+                self._timestamp_field.name: {
+                    self._timestamp_field.data_type: str(timestamp)
+                }
+            }
+        }
+
+    def _prepare_inc_count_request(self, key: str) -> Dict[str, Any]:
+        """Construct the counter increment request parameters."""
+        return {
+            'TableName': self.table_name,
+            'Key': {
+                self._key_field.name: {
+                    self._key_field.data_type: key
+                }
+            },
+            'UpdateExpression': f"set {self._count_filed.name} = {self._count_filed.name} + :num",
+            "ExpressionAttributeValues": {
+                ":num": {"N": "1"},
+            },
+            "ReturnValues": "UPDATED_NEW",
+        }
+
     def _item_to_dict(self, raw_response):
         """Convert get_item() response to field-value pairs."""
         if 'Item' not in raw_response:
@@ -491,3 +530,18 @@ def delete(self, key):
         key = str(key)
         request_parameters = self._prepare_get_request(key)
         self.client.delete_item(**request_parameters)
+
+    def incr(self, key: bytes) -> int:
+        """Atomically increase the chord_count and return the new 
count""" + key = str(key) + request_parameters = self._prepare_inc_count_request(key) + item_response = self.client.update_item(**request_parameters) + new_count: str = item_response["Attributes"][self._count_filed.name][self._count_filed.data_type] + return int(new_count) + + def _apply_chord_incr(self, header_result_args, body, **kwargs): + chord_key = self.get_key_for_chord(header_result_args[0]) + init_count_request = self._prepare_init_count_request(str(chord_key)) + self.client.put_item(**init_count_request) + return super()._apply_chord_incr( + header_result_args, body, **kwargs) diff --git a/docs/userguide/canvas.rst b/docs/userguide/canvas.rst index 58e8dbd8c12..f9c8c1d323e 100644 --- a/docs/userguide/canvas.rst +++ b/docs/userguide/canvas.rst @@ -1000,11 +1000,11 @@ Example implementation: raise self.retry(countdown=interval, max_retries=max_retries) -This is used by all result backends except Redis and Memcached: they +This is used by all result backends except Redis, Memcached and DynamoDB: they increment a counter after each task in the header, then applies the callback when the counter exceeds the number of tasks in the set. -The Redis and Memcached approach is a much better solution, but not easily +The Redis, Memcached and DynamoDB approach is a much better solution, but not easily implemented in other backends (suggestions welcome!). .. note:: diff --git a/t/unit/backends/test_dynamodb.py b/t/unit/backends/test_dynamodb.py index 0afb425e1d1..c6004e410e6 100644 --- a/t/unit/backends/test_dynamodb.py +++ b/t/unit/backends/test_dynamodb.py @@ -1,9 +1,9 @@ from decimal import Decimal -from unittest.mock import MagicMock, Mock, patch, sentinel +from unittest.mock import ANY, MagicMock, Mock, call, patch, sentinel import pytest -from celery import states +from celery import states, uuid from celery.backends import dynamodb as module from celery.backends.dynamodb import DynamoDBBackend from celery.exceptions import ImproperlyConfigured @@ -426,6 +426,34 @@ def test_prepare_put_request_with_ttl(self): result = self.backend._prepare_put_request('abcdef', 'val') assert result == expected + def test_prepare_init_count_request(self): + expected = { + 'TableName': 'celery', + 'Item': { + 'id': {'S': 'abcdef'}, + 'chord_count': {'N': '0'}, + 'timestamp': { + 'N': str(Decimal(self._static_timestamp)) + }, + } + } + with patch('celery.backends.dynamodb.time', self._mock_time): + result = self.backend._prepare_init_count_request('abcdef') + assert result == expected + + def test_prepare_inc_count_request(self): + expected = { + 'TableName': 'celery', + 'Key': { + 'id': {'S': 'abcdef'}, + }, + 'UpdateExpression': 'set chord_count = chord_count + :num', + 'ExpressionAttributeValues': {":num": {"N": "1"}}, + 'ReturnValues': 'UPDATED_NEW', + } + result = self.backend._prepare_inc_count_request('abcdef') + assert result == expected + def test_item_to_dict(self): boto_response = { 'Item': { @@ -517,6 +545,39 @@ def test_delete(self): TableName='celery' ) + def test_inc(self): + mocked_incr_response = { + 'Attributes': { + 'chord_count': { + 'N': '1' + } + }, + 'ResponseMetadata': { + 'RequestId': '16d31c72-51f6-4538-9415-499f1135dc59', + 'HTTPStatusCode': 200, + 'HTTPHeaders': { + 'date': 'Wed, 10 Jan 2024 17:53:41 GMT', + 'x-amzn-requestid': '16d31c72-51f6-4538-9415-499f1135dc59', + 'content-type': 'application/x-amz-json-1.0', + 'x-amz-crc32': '3438282865', + 'content-length': '40', + 'server': 'Jetty(11.0.17)' + }, + 'RetryAttempts': 0 + } + } + self.backend._client = MagicMock() + 
self.backend._client.update_item = MagicMock(return_value=mocked_incr_response) + + assert self.backend.incr('1f3fab') == 1 + self.backend.client.update_item.assert_called_once_with( + Key={'id': {'S': '1f3fab'}}, + TableName='celery', + UpdateExpression='set chord_count = chord_count + :num', + ExpressionAttributeValues={":num": {"N": "1"}}, + ReturnValues='UPDATED_NEW', + ) + def test_backend_by_url(self, url='dynamodb://'): from celery.app import backends from celery.backends.dynamodb import DynamoDBBackend @@ -537,3 +598,33 @@ def test_backend_params_by_url(self): assert self.backend.write_capacity_units == 20 assert self.backend.time_to_live_seconds == 600 assert self.backend.endpoint_url is None + + def test_apply_chord(self, unlock="celery.chord_unlock"): + self.app.tasks[unlock] = Mock() + chord_uuid = uuid() + header_result_args = ( + chord_uuid, + [self.app.AsyncResult(x) for x in range(3)], + ) + self.backend._client = MagicMock() + self.backend.apply_chord(header_result_args, None) + assert self.backend._client.put_item.call_args_list == [ + call( + TableName="celery", + Item={ + "id": {"S": f"b'chord-unlock-{chord_uuid}'"}, + "chord_count": {"N": "0"}, + "timestamp": {"N": ANY}, + }, + ), + call( + TableName="celery", + Item={ + "id": {"S": f"b'celery-taskset-meta-{chord_uuid}'"}, + "result": { + "B": ANY, + }, + "timestamp": {"N": ANY}, + }, + ), + ] From da1146ab60065847b9742bb61190d52a7a2c5fdf Mon Sep 17 00:00:00 2001 From: Bruno Alla Date: Mon, 29 Jan 2024 17:12:43 +0000 Subject: [PATCH 0643/1051] Add a Task class specialised for Django (#8491) * Add a Task class for Django * Automatically use specialised Django Task class * Add unit tests for specialized Django task * Don't use specialized Django task if customised by user * Add patch to avoid side effects with other tests * Rename task class to DjangoTask * Add versionadded * Add reference page for new DjangoTask * Fix generation of reference documentation for DjangoTask * Fix links & extend documentation * Fix link to base task in docs * Improve links in DjangoTask docs * Improve more links in DjangoTask docs * Apply suggestions from code review Co-authored-by: Asif Saif Uddin * Update Django example to demo the new delay_on_commit() method * Replace try/catch ImportError for documentation by autodoc_mock_imports --------- Co-authored-by: Asif Saif Uddin --- celery/app/base.py | 1 + celery/contrib/django/__init__.py | 0 celery/contrib/django/task.py | 21 +++++++ celery/fixups/django.py | 3 + docs/conf.py | 3 +- docs/django/first-steps-with-django.rst | 58 +++++++++++++++++++ docs/reference/celery.contrib.django.task.rst | 17 ++++++ docs/reference/index.rst | 1 + examples/django/README.rst | 8 ++- t/unit/contrib/django/__init__.py | 0 t/unit/contrib/django/test_task.py | 32 ++++++++++ t/unit/fixups/test_django.py | 26 ++++++++- 12 files changed, 167 insertions(+), 3 deletions(-) create mode 100644 celery/contrib/django/__init__.py create mode 100644 celery/contrib/django/task.py create mode 100644 docs/reference/celery.contrib.django.task.rst create mode 100644 t/unit/contrib/django/__init__.py create mode 100644 t/unit/contrib/django/test_task.py diff --git a/celery/app/base.py b/celery/app/base.py index 78012936e5e..863f264f854 100644 --- a/celery/app/base.py +++ b/celery/app/base.py @@ -240,6 +240,7 @@ def __init__(self, main=None, loader=None, backend=None, self.loader_cls = loader or self._get_default_loader() self.log_cls = log or self.log_cls self.control_cls = control or self.control_cls + 
self._custom_task_cls_used = bool(task_cls)
         self.task_cls = task_cls or self.task_cls
         self.set_as_current = set_as_current
         self.registry_cls = symbol_by_name(self.registry_cls)
diff --git a/celery/contrib/django/__init__.py b/celery/contrib/django/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/celery/contrib/django/task.py b/celery/contrib/django/task.py
new file mode 100644
index 00000000000..eacc7c66471
--- /dev/null
+++ b/celery/contrib/django/task.py
@@ -0,0 +1,21 @@
+import functools
+
+from django.db import transaction
+
+from celery.app.task import Task
+
+
+class DjangoTask(Task):
+    """
+    Extend the base :class:`~celery.app.task.Task` for Django.
+
+    Provide a nicer API to trigger tasks at the end of the DB transaction.
+    """
+
+    def delay_on_commit(self, *args, **kwargs):
+        """Call :meth:`~celery.app.task.Task.delay` with Django's ``on_commit()``."""
+        return transaction.on_commit(functools.partial(self.delay, *args, **kwargs))
+
+    def apply_async_on_commit(self, *args, **kwargs):
+        """Call :meth:`~celery.app.task.Task.apply_async` with Django's ``on_commit()``."""
+        return transaction.on_commit(functools.partial(self.apply_async, *args, **kwargs))
diff --git a/celery/fixups/django.py b/celery/fixups/django.py
index adc26db08f8..5a8ca1b993a 100644
--- a/celery/fixups/django.py
+++ b/celery/fixups/django.py
@@ -78,6 +78,9 @@ def install(self) -> "DjangoFixup":
         self._settings = symbol_by_name('django.conf:settings')
         self.app.loader.now = self.now
 
+        if not self.app._custom_task_cls_used:
+            self.app.task_cls = 'celery.contrib.django.task:DjangoTask'
+
         signals.import_modules.connect(self.on_import_modules)
         signals.worker_init.connect(self.on_worker_init)
         return self
diff --git a/docs/conf.py b/docs/conf.py
index 83ac849e98e..736240f1595 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -45,7 +45,8 @@
         r'^http://localhost'
     ],
     autodoc_mock_imports=[
-        'riak'
+        'riak',
+        'django',
     ]
 ))
diff --git a/docs/django/first-steps-with-django.rst b/docs/django/first-steps-with-django.rst
index 35914e8098b..b8a9f739e7b 100644
--- a/docs/django/first-steps-with-django.rst
+++ b/docs/django/first-steps-with-django.rst
@@ -153,6 +153,64 @@ concrete app instance:
 You can find the full source code for the Django example project at:
 https://github.com/celery/celery/tree/main/examples/django/
 
+Trigger tasks at the end of the database transaction
+----------------------------------------------------
+
+A common pitfall with Django is triggering a task immediately instead of
+waiting until the end of the database transaction, which means that the Celery
+task may run before all changes are persisted to the database. For example:
+
+.. code-block:: python
+
+    # views.py
+    def create_user(request):
+        # Note: simplified example, use a form to validate input
+        user = User.objects.create(username=request.POST['username'])
+        send_email.delay(user.pk)
+        return HttpResponse('User created')
+
+    # task.py
+    @shared_task
+    def send_email(user_pk):
+        user = User.objects.get(pk=user_pk)
+        # send email ...
+
+In this case, the ``send_email`` task could start before the view has committed
+the transaction to the database, and therefore the task may not be able to find
+the user.
+
+A common solution is to use Django's `on_commit`_ hook to trigger the task
+after the transaction has been committed:
+
+.. _on_commit: https://docs.djangoproject.com/en/stable/topics/db/transactions/#django.db.transaction.on_commit
+
+.. code-block:: diff
+
+    - send_email.delay(user.pk)
+    + transaction.on_commit(lambda: send_email.delay(user.pk))
+
+.. versionadded:: 5.4
+
+Since this is such a common pattern, Celery 5.4 introduced a handy shortcut for this,
+using a :class:`~celery.contrib.django.task.DjangoTask`. Instead of calling
+:meth:`~celery.app.task.Task.delay`, you should call
+:meth:`~celery.contrib.django.task.DjangoTask.delay_on_commit`:
+
+.. code-block:: diff
+
+    - send_email.delay(user.pk)
+    + send_email.delay_on_commit(user.pk)
+
+
+This API takes care of wrapping the call into the `on_commit`_ hook for you.
+In rare cases where you want to trigger a task without waiting, the existing
+:meth:`~celery.app.task.Task.delay` API is still available.
+
+This task class should be used automatically if you've followed the setup steps above.
+However, if your app :ref:`uses a custom task base class `,
+you'll need to inherit from :class:`~celery.contrib.django.task.DjangoTask` instead of
+:class:`~celery.app.task.Task` to get this behaviour.
+
 Extensions
 ==========
 
diff --git a/docs/reference/celery.contrib.django.task.rst b/docs/reference/celery.contrib.django.task.rst
new file mode 100644
index 00000000000..6403afd0238
--- /dev/null
+++ b/docs/reference/celery.contrib.django.task.rst
@@ -0,0 +1,17 @@
+====================================
+ ``celery.contrib.django.task``
+====================================
+
+.. versionadded:: 5.4
+
+.. contents::
+    :local:
+
+API Reference
+=============
+
+.. currentmodule:: celery.contrib.django.task
+
+.. automodule:: celery.contrib.django.task
+    :members:
+    :undoc-members:
diff --git a/docs/reference/index.rst b/docs/reference/index.rst
index 19208fa22d0..c1fa7aed9d2 100644
--- a/docs/reference/index.rst
+++ b/docs/reference/index.rst
@@ -37,6 +37,7 @@
     celery.loaders.base
     celery.states
     celery.contrib.abortable
+    celery.contrib.django.task
     celery.contrib.migrate
     celery.contrib.pytest
     celery.contrib.sphinx
diff --git a/examples/django/README.rst b/examples/django/README.rst
index 0bb8ef49315..188c8dd50a7 100644
--- a/examples/django/README.rst
+++ b/examples/django/README.rst
@@ -55,6 +55,15 @@ Running a task
 
     $ python ./manage.py shell
     >>> from demoapp.tasks import add, mul, xsum
-    >>> res = add.delay(2,3)
+    >>> res = add.delay(2, 3)
     >>> res.get()
     5
+    >>> add.delay_on_commit(2, 3)
+
+.. note::
+
+   The ``delay_on_commit`` method is only available when using Django,
+   and was added in Celery 5.4. Note that it returns ``None`` instead
+   of a result object, so the result cannot be retrieved as above. If
+   you are using an older version of Celery, you can use ``delay``
+   instead.
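As a quick orientation for readers of this patch, here is a minimal sketch of
what the new API looks like at a call site. It assumes the ``send_email`` task
from the documentation hunk above and the fixup having installed ``DjangoTask``
as the base class; the model, view, and module names are illustrative and not
part of the patch.

.. code-block:: python

    # views.py -- illustrative sketch only
    from django.contrib.auth.models import User
    from django.http import HttpResponse

    from .tasks import send_email  # a @shared_task, as in the docs above


    def create_user(request):
        user = User.objects.create(username=request.POST['username'])
        # The task is sent only once the surrounding transaction commits;
        # equivalent to transaction.on_commit(lambda: send_email.delay(user.pk)).
        send_email.delay_on_commit(user.pk)
        return HttpResponse('User created')

Since Django runs ``on_commit`` callbacks immediately when no transaction is
active, the same call also behaves sensibly under autocommit, for example in a
shell session.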
diff --git a/t/unit/contrib/django/__init__.py b/t/unit/contrib/django/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/t/unit/contrib/django/test_task.py b/t/unit/contrib/django/test_task.py new file mode 100644 index 00000000000..52b45b84bc4 --- /dev/null +++ b/t/unit/contrib/django/test_task.py @@ -0,0 +1,32 @@ +from unittest.mock import patch + +import pytest + + +@pytest.mark.patched_module( + 'django', + 'django.db', + 'django.db.transaction', +) +@pytest.mark.usefixtures("module") +class test_DjangoTask: + @pytest.fixture + def task_instance(self): + from celery.contrib.django.task import DjangoTask + yield DjangoTask() + + @pytest.fixture(name="on_commit") + def on_commit(self): + with patch( + 'django.db.transaction.on_commit', + side_effect=lambda f: f(), + ) as patched_on_commit: + yield patched_on_commit + + def test_delay_on_commit(self, task_instance, on_commit): + result = task_instance.delay_on_commit() + assert result is not None + + def test_apply_async_on_commit(self, task_instance, on_commit): + result = task_instance.apply_async_on_commit() + assert result is not None diff --git a/t/unit/fixups/test_django.py b/t/unit/fixups/test_django.py index 8a97884ed4a..b25bf0879b5 100644 --- a/t/unit/fixups/test_django.py +++ b/t/unit/fixups/test_django.py @@ -87,7 +87,12 @@ def test_init(self): with self.fixup_context(self.app) as (f, importmod, sym): assert f - def test_install(self, patching): + @pytest.mark.patched_module( + 'django', + 'django.db', + 'django.db.transaction', + ) + def test_install(self, patching, module): self.app.loader = Mock() self.cw = patching('os.getcwd') self.p = patching('sys.path') @@ -97,8 +102,27 @@ def test_install(self, patching): f.install() self.sigs.worker_init.connect.assert_called_with(f.on_worker_init) assert self.app.loader.now == f.now + + # Specialized Task class is used + assert self.app.task_cls == 'celery.contrib.django.task:DjangoTask' + from celery.contrib.django.task import DjangoTask + assert issubclass(f.app.Task, DjangoTask) + assert hasattr(f.app.Task, 'delay_on_commit') + assert hasattr(f.app.Task, 'apply_async_on_commit') + self.p.insert.assert_called_with(0, '/opt/vandelay') + def test_install_custom_user_task(self, patching): + patching('celery.fixups.django.signals') + + self.app.task_cls = 'myapp.celery.tasks:Task' + self.app._custom_task_cls_used = True + + with self.fixup_context(self.app) as (f, _, _): + f.install() + # Specialized Task class is NOT used + assert self.app.task_cls == 'myapp.celery.tasks:Task' + def test_now(self): with self.fixup_context(self.app) as (f, _, _): assert f.now(utc=True) From eff0b2fb2ae6579542e38fceae63f436bfe25b5e Mon Sep 17 00:00:00 2001 From: Alexander Melnyk Date: Tue, 30 Jan 2024 12:54:48 +0200 Subject: [PATCH 0644/1051] Sync kombu versions in requirements and setup.cfg (#8825) --- requirements/extras/sqs.txt | 2 +- setup.cfg | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/extras/sqs.txt b/requirements/extras/sqs.txt index 7aa763de377..03d1687cfcd 100644 --- a/requirements/extras/sqs.txt +++ b/requirements/extras/sqs.txt @@ -1,4 +1,4 @@ boto3>=1.26.143 pycurl>=7.43.0.5; sys_platform != 'win32' and platform_python_implementation=="CPython" urllib3>=1.26.16 -kombu[sqs]>=5.3.0 +kombu[sqs]>=5.3.4 diff --git a/setup.cfg b/setup.cfg index a452ae09a64..1d66df8b7a2 100644 --- a/setup.cfg +++ b/setup.cfg @@ -35,7 +35,7 @@ per-file-ignores = requires = backports.zoneinfo>=0.2.1;python_version<'3.9' tzdata>=2022.7 billiard 
>=4.1.0,<5.0
-           kombu >= 5.3.2,<6.0.0
+           kombu >= 5.3.4,<6.0.0
 
 [bdist_wheel]
 universal = 0

From 1b01fe7b2f6b9579b68c13d4cece76f91d12e160 Mon Sep 17 00:00:00 2001
From: Eri
Date: Tue, 30 Jan 2024 16:51:48 +0100
Subject: [PATCH 0645/1051] chore(ci): Enhance CI with `workflow_dispatch` for
 targeted debugging and testing (#8822) (#8826)

* chore(ci): Enhance CI with `workflow_dispatch` for targeted debugging and
  testing (#8822)

This patch introduces the `workflow_dispatch` trigger into CI, to allow more
manual and targeted control of running independent pipelines when debugging
CI or testing environments. It gives developers a way to run isolated
workflows from the CLI in case of failure.

At the moment, the implementation respects strong defaults according to the
events documentation:
https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#providing-inputs

* chore(ci): update all workflows with `workflow_dispatch` event
---
 .github/workflows/codeql-analysis.yml | 4 +++-
 .github/workflows/docker.yml          | 1 +
 .github/workflows/linter.yml          | 2 +-
 .github/workflows/python-package.yml  | 2 ++
 .github/workflows/semgrep.yml         | 2 ++
 5 files changed, 9 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
index a1dcabfe893..d0b8564bb86 100644
--- a/.github/workflows/codeql-analysis.yml
+++ b/.github/workflows/codeql-analysis.yml
@@ -17,7 +17,9 @@ on:
   pull_request:
     # The branches below must be a subset of the branches above
     branches: [ main ]
-
+  workflow_dispatch:
+
+
 jobs:
   analyze:

diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml
index 65dd0914029..380a87c0eff 100644
--- a/.github/workflows/docker.yml
+++ b/.github/workflows/docker.yml
@@ -18,6 +18,7 @@ on:
       - '**.toml'
       - '/docker/**'
       - '.github/workflows/docker.yml'
+  workflow_dispatch:
 
 jobs:

diff --git a/.github/workflows/linter.yml b/.github/workflows/linter.yml
index 31fa81f88cf..f12f0169627 100644
--- a/.github/workflows/linter.yml
+++ b/.github/workflows/linter.yml
@@ -1,6 +1,6 @@
 name: Linter
 
-on: [pull_request]
+on: [pull_request, workflow_dispatch]
 
 jobs:
   linter:

diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml
index 3efa187bc3e..88f83caf71c 100644
--- a/.github/workflows/python-package.yml
+++ b/.github/workflows/python-package.yml
@@ -20,6 +20,8 @@ on:
       - '**.toml'
       - '.github/workflows/python-package.yml'
       - "tox.ini"
+  workflow_dispatch:
+
 
 permissions:
   contents: read # to fetch code (actions/checkout)

diff --git a/.github/workflows/semgrep.yml b/.github/workflows/semgrep.yml
index 1352b65ae16..ddb065dbe48 100644
--- a/.github/workflows/semgrep.yml
+++ b/.github/workflows/semgrep.yml
@@ -9,6 +9,8 @@ on:
   schedule:
     # random HH:MM to avoid a load spike on GitHub Actions at 00:00
     - cron: 44 6 * * *
+  workflow_dispatch:
+
 
 name: Semgrep
 jobs:
   semgrep:

From 58d2e67a0c0050e793c54928418749d68ee4e3bb Mon Sep 17 00:00:00 2001
From: "pyup.io bot"
Date: Tue, 30 Jan 2024 11:12:15 -0800
Subject: [PATCH 0646/1051] Update cryptography from 42.0.1 to 42.0.2 (#8827)

---
 requirements/extras/auth.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements/extras/auth.txt b/requirements/extras/auth.txt
index d0384ae0df0..1041fe6906c 100644
--- a/requirements/extras/auth.txt
+++ b/requirements/extras/auth.txt
@@ -1 +1 @@
-cryptography==42.0.1
+cryptography==42.0.2

From 32a285dd956756322e8a9c9310731e8419a3660b Mon Sep 17 00:00:00 2001
From: Tomer Nosrati
Date: Wed, 31 Jan 2024 13:33:52 +0200
Subject: [PATCH 0647/1051] Docfix: pip install celery[sqs] -> pip install
 "celery[sqs]" (#8829)

---
 docs/getting-started/backends-and-brokers/sqs.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/getting-started/backends-and-brokers/sqs.rst b/docs/getting-started/backends-and-brokers/sqs.rst
index a9f82686910..9017871b984 100644
--- a/docs/getting-started/backends-and-brokers/sqs.rst
+++ b/docs/getting-started/backends-and-brokers/sqs.rst
@@ -15,7 +15,7 @@ the ``celery[sqs]`` :ref:`bundle `:
 
 .. code-block:: console
 
-    $ pip install celery[sqs]
+    $ pip install "celery[sqs]"
 
 .. _broker-sqs-configuration:

From f6bf836041f151c25a4ba86e7ebdc2102379340a Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Thu, 8 Feb 2024 12:25:49 +0200
Subject: [PATCH 0648/1051] Bump pre-commit/action from 3.0.0 to 3.0.1 (#8835)

Bumps [pre-commit/action](https://github.com/pre-commit/action) from 3.0.0 to 3.0.1.
- [Release notes](https://github.com/pre-commit/action/releases)
- [Commits](https://github.com/pre-commit/action/compare/v3.0.0...v3.0.1)

---
updated-dependencies:
- dependency-name: pre-commit/action
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 .github/workflows/linter.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/linter.yml b/.github/workflows/linter.yml
index f12f0169627..50d911657fc 100644
--- a/.github/workflows/linter.yml
+++ b/.github/workflows/linter.yml
@@ -11,4 +11,4 @@ jobs:
         uses: actions/checkout@v4
 
       - name: Run pre-commit
-        uses: pre-commit/action@v3.0.0
+        uses: pre-commit/action@v3.0.1

From acae57f59d0102e6cad57102fbf285e01132f6f7 Mon Sep 17 00:00:00 2001
From: Steve Kowalik
Date: Fri, 9 Feb 2024 22:38:47 +1100
Subject: [PATCH 0649/1051] Support moto 5.0 (#8838)

moto 5.0 has been released, and the major change is to pull all of the
separate mock calls into one -- mock_aws. Continue to support moto 4,
since it's easy to do so.
--- requirements/test.txt | 2 +- t/unit/backends/test_s3.py | 16 ++++++++++------ 2 files changed, 11 insertions(+), 7 deletions(-) diff --git a/requirements/test.txt b/requirements/test.txt index 579a73977fd..79bf094fda6 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -6,7 +6,7 @@ pytest-timeout==2.2.0 pytest-click==1.1.0 pytest-order==1.2.0 boto3>=1.26.143 -moto>=4.1.11,<5.0.0 +moto>=4.1.11,<5.1.0 # typing extensions mypy==1.8.0; platform_python_implementation=="CPython" pre-commit==3.5.0 diff --git a/t/unit/backends/test_s3.py b/t/unit/backends/test_s3.py index fdea04b32cc..4929e23323d 100644 --- a/t/unit/backends/test_s3.py +++ b/t/unit/backends/test_s3.py @@ -3,7 +3,11 @@ import boto3 import pytest from botocore.exceptions import ClientError -from moto import mock_s3 + +try: + from moto import mock_aws +except ImportError: + from moto import mock_s3 as mock_aws from celery import states from celery.backends.s3 import S3Backend @@ -84,7 +88,7 @@ def test_it_creates_an_aws_s3_resource(self, 's3', endpoint_url=endpoint_url) @pytest.mark.parametrize("key", ['uuid', b'uuid']) - @mock_s3 + @mock_aws def test_set_and_get_a_key(self, key): self._mock_s3_resource() @@ -97,7 +101,7 @@ def test_set_and_get_a_key(self, key): assert s3_backend.get(key) == 'another_status' - @mock_s3 + @mock_aws def test_set_and_get_a_result(self): self._mock_s3_resource() @@ -111,7 +115,7 @@ def test_set_and_get_a_result(self): value = s3_backend.get_result('foo') assert value == 'baar' - @mock_s3 + @mock_aws def test_get_a_missing_key(self): self._mock_s3_resource() @@ -141,7 +145,7 @@ def test_with_error_while_getting_key(self, mock_boto3): s3_backend.get('uuidddd') @pytest.mark.parametrize("key", ['uuid', b'uuid']) - @mock_s3 + @mock_aws def test_delete_a_key(self, key): self._mock_s3_resource() @@ -157,7 +161,7 @@ def test_delete_a_key(self, key): assert s3_backend.get(key) is None - @mock_s3 + @mock_aws def test_with_a_non_existing_bucket(self): self._mock_s3_resource() From d80fb3061b9f0b31f6e1b4975108dd6b8d279e6e Mon Sep 17 00:00:00 2001 From: Murray Christopherson Date: Mon, 12 Feb 2024 18:16:59 -0800 Subject: [PATCH 0650/1051] Another fix for `link_error` signatures being `dict`s instead of `Signature`s (#8841) * Another fix for `link_error` signatures being `dict`s instead of `Signature`s Related to https://github.com/celery/celery/issues/8678 * whitespace * typo * adding unittest * typo --- celery/canvas.py | 2 ++ t/unit/tasks/test_canvas.py | 10 ++++++++++ 2 files changed, 12 insertions(+) diff --git a/celery/canvas.py b/celery/canvas.py index 469d3ee99fb..909962c1639 100644 --- a/celery/canvas.py +++ b/celery/canvas.py @@ -1673,6 +1673,8 @@ def link_error(self, sig): # # We return a concretised tuple of the signatures actually applied to # each child task signature, of which there might be none! 
+ sig = maybe_signature(sig) + return tuple(child_task.link_error(sig.clone(immutable=True)) for child_task in self.tasks) def _prepared(self, tasks, partial_args, group_id, root_id, app, diff --git a/t/unit/tasks/test_canvas.py b/t/unit/tasks/test_canvas.py index a90d203e234..5bed3d8ec51 100644 --- a/t/unit/tasks/test_canvas.py +++ b/t/unit/tasks/test_canvas.py @@ -869,6 +869,16 @@ def test_link_error(self): for child_sig in g1.tasks: child_sig.link_error.assert_called_with(sig.clone(immutable=True)) + def test_link_error_with_dict_sig(self): + g1 = group(Mock(name='t1'), Mock(name='t2'), app=self.app) + errback = signature('tcb') + errback_dict = dict(errback) + g1.link_error(errback_dict) + # We expect that all group children will be given the errback to ensure + # it gets called + for child_sig in g1.tasks: + child_sig.link_error.assert_called_with(errback.clone(immutable=True)) + def test_apply_empty(self): x = group(app=self.app) x.apply() From 372c689aff00934f658f1a3423840da32efb50bc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 13 Feb 2024 04:17:23 +0200 Subject: [PATCH 0651/1051] Bump codecov/codecov-action from 3 to 4 (#8831) * Bump codecov/codecov-action from 3 to 4 Bumps [codecov/codecov-action](https://github.com/codecov/codecov-action) from 3 to 4. - [Release notes](https://github.com/codecov/codecov-action/releases) - [Changelog](https://github.com/codecov/codecov-action/blob/main/CHANGELOG.md) - [Commits](https://github.com/codecov/codecov-action/compare/v3...v4) --- updated-dependencies: - dependency-name: codecov/codecov-action dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] * Update .github/workflows/python-package.yml --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Tomer Nosrati --- .github/workflows/python-package.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 88f83caf71c..8827da67018 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -67,10 +67,11 @@ jobs: run: | tox --verbose --verbose - - uses: codecov/codecov-action@v3 + - uses: codecov/codecov-action@v4 with: flags: unittests # optional fail_ci_if_error: true # optional (default = false) + token: ${{ secrets.CODECOV_TOKEN }} verbose: true # optional (default = false) Integration: From 0078d67f563e9a460bb9f47eacc2ecaa5186b7eb Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 13 Feb 2024 04:17:52 +0200 Subject: [PATCH 0652/1051] Upgrade from pytest-celery v1.0.0b1 -> v1.0.0b2 (#8843) * Upgrade from pytest-celery v1.0.0b1 -> v1.0.0b2 * Fixed docker/docs/Dockerfile --- docker/docs/Dockerfile | 4 ++++ requirements/test.txt | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/docker/docs/Dockerfile b/docker/docs/Dockerfile index 3005aa5fba5..0aa804b5f41 100644 --- a/docker/docs/Dockerfile +++ b/docker/docs/Dockerfile @@ -22,6 +22,10 @@ COPY /requirements /requirements # All imports needed for autodoc. RUN pip install -r /requirements/docs.txt -r /requirements/default.txt +COPY . 
/celery + +RUN pip install /celery + COPY docker/docs/start /start-docs RUN sed -i 's/\r$//g' /start-docs RUN chmod +x /start-docs diff --git a/requirements/test.txt b/requirements/test.txt index 79bf094fda6..bfb0d468a31 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,5 +1,5 @@ pytest==7.4.4 -pytest-celery==1.0.0b1 +pytest-celery==1.0.0b2 pytest-rerunfailures==13.0.0 pytest-subtests==0.11.0 pytest-timeout==2.2.0 From 86543a51fa2c52b247c96a66c1917ec7abce051e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 13 Feb 2024 12:11:35 +0200 Subject: [PATCH 0653/1051] Bump pytest from 7.4.4 to 8.0.0 (#8823) Bumps [pytest](https://github.com/pytest-dev/pytest) from 7.4.4 to 8.0.0. - [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/7.4.4...8.0.0) --- updated-dependencies: - dependency-name: pytest dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index bfb0d468a31..da2b3eaca4b 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,4 +1,4 @@ -pytest==7.4.4 +pytest==8.0.0 pytest-celery==1.0.0b2 pytest-rerunfailures==13.0.0 pytest-subtests==0.11.0 From 7528900d53875bb634f92229ec6ed915c6fc58b6 Mon Sep 17 00:00:00 2001 From: "pyup.io bot" Date: Tue, 13 Feb 2024 04:05:23 -0800 Subject: [PATCH 0654/1051] Update pre-commit to 3.6.1 (#8839) * Update pre-commit from 3.5.0 to 3.6.1 * Update requirements/test.txt * Update requirements/test.txt --------- Co-authored-by: Tomer Nosrati --- requirements/test.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index da2b3eaca4b..56f8c3e185e 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -9,7 +9,8 @@ boto3>=1.26.143 moto>=4.1.11,<5.1.0 # typing extensions mypy==1.8.0; platform_python_implementation=="CPython" -pre-commit==3.5.0 +pre-commit>=3.5.0,<3.6.0; python_version < '3.9' +pre-commit>=3.6.1; python_version >= '3.9' -r extras/yaml.txt -r extras/msgpack.txt -r extras/mongodb.txt From 3a53549f65f438ab7bc5be70980aa022e6cc3432 Mon Sep 17 00:00:00 2001 From: "pyup.io bot" Date: Fri, 16 Feb 2024 03:18:53 -0800 Subject: [PATCH 0655/1051] Update cryptography from 42.0.2 to 42.0.3 (#8854) --- requirements/extras/auth.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/auth.txt b/requirements/extras/auth.txt index 1041fe6906c..3669d114739 100644 --- a/requirements/extras/auth.txt +++ b/requirements/extras/auth.txt @@ -1 +1 @@ -cryptography==42.0.2 +cryptography==42.0.3 From 0eb5d0e63ab10e9d2c9c6293b241bf8863cbc0e8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 17 Feb 2024 11:14:37 +0200 Subject: [PATCH 0656/1051] Bump pytest from 8.0.0 to 8.0.1 (#8855) Bumps [pytest](https://github.com/pytest-dev/pytest) from 8.0.0 to 8.0.1. 
- [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/8.0.0...8.0.1) --- updated-dependencies: - dependency-name: pytest dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index 56f8c3e185e..12562707fd3 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,4 +1,4 @@ -pytest==8.0.0 +pytest==8.0.1 pytest-celery==1.0.0b2 pytest-rerunfailures==13.0.0 pytest-subtests==0.11.0 From 63983940153389b8f2525bdf71253219fab5cc78 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 19 Feb 2024 19:17:01 +0200 Subject: [PATCH 0657/1051] [pre-commit.ci] pre-commit autoupdate (#8861) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/asottile/pyupgrade: v3.15.0 → v3.15.1](https://github.com/asottile/pyupgrade/compare/v3.15.0...v3.15.1) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 66653ceaa63..ff2addbc262 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/asottile/pyupgrade - rev: v3.15.0 + rev: v3.15.1 hooks: - id: pyupgrade args: ["--py38-plus"] From 71fce1b692b3dfe68b921c146a47c461054a1428 Mon Sep 17 00:00:00 2001 From: "pyup.io bot" Date: Tue, 20 Feb 2024 21:49:47 -0800 Subject: [PATCH 0658/1051] Update cryptography from 42.0.3 to 42.0.4 (#8864) --- requirements/extras/auth.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/auth.txt b/requirements/extras/auth.txt index 3669d114739..e772b712f49 100644 --- a/requirements/extras/auth.txt +++ b/requirements/extras/auth.txt @@ -1 +1 @@ -cryptography==42.0.3 +cryptography==42.0.4 From 800b663bbeaaa64cf2c5bda23508058520eac3b7 Mon Sep 17 00:00:00 2001 From: "pyup.io bot" Date: Sun, 25 Feb 2024 01:10:31 -0800 Subject: [PATCH 0659/1051] Update pytest from 8.0.1 to 8.0.2 (#8870) --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index 12562707fd3..531c44e209c 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,4 +1,4 @@ -pytest==8.0.1 +pytest==8.0.2 pytest-celery==1.0.0b2 pytest-rerunfailures==13.0.0 pytest-subtests==0.11.0 From 806e2c1d4c8c8ef9f2371a88a1b71fa0b2319ae5 Mon Sep 17 00:00:00 2001 From: "pyup.io bot" Date: Sun, 25 Feb 2024 01:12:36 -0800 Subject: [PATCH 0660/1051] Update cryptography from 42.0.4 to 42.0.5 (#8869) --- requirements/extras/auth.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/auth.txt b/requirements/extras/auth.txt index e772b712f49..3beaa30e8a6 100644 --- a/requirements/extras/auth.txt +++ b/requirements/extras/auth.txt @@ -1 +1 @@ -cryptography==42.0.4 +cryptography==42.0.5 From ac16f239985cf9248155b95788c4b6227f7f1b94 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 25 Feb 2024 
11:13:58 +0200
Subject: [PATCH 0661/1051] Update elasticsearch requirement from <=8.12.0 to
 <=8.12.1 (#8867)

Updates the requirements on [elasticsearch](https://github.com/elastic/elasticsearch-py) to permit the latest version.
- [Release notes](https://github.com/elastic/elasticsearch-py/releases)
- [Commits](https://github.com/elastic/elasticsearch-py/compare/0.4.1...v8.12.1)

---
updated-dependencies:
- dependency-name: elasticsearch
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 requirements/extras/elasticsearch.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements/extras/elasticsearch.txt b/requirements/extras/elasticsearch.txt
index 39417c6d221..3a5f5003b57 100644
--- a/requirements/extras/elasticsearch.txt
+++ b/requirements/extras/elasticsearch.txt
@@ -1,2 +1,2 @@
-elasticsearch<=8.12.0
+elasticsearch<=8.12.1
 elastic-transport<=8.12.0

From f8c952d8e2a6746cd7410b1b662d7b7045b347ef Mon Sep 17 00:00:00 2001
From: Hann Wang
Date: Tue, 27 Feb 2024 23:29:51 +0800
Subject: [PATCH 0662/1051] Eliminate consecutive chords generated by group |
 task upgrade (#8663)

* chord | task -> attach to body in prepare_steps

* add unit test

* fix: clone original chord before modifying its body

* fix: misuse of task clone

* turning chained chords into a single chord with nested bodies

* remove the for-loop and consider the type of the unrolled group

* replace pop with slice

* add integration tests

* add unit test

* updated tests

---------

Co-authored-by: Wang Han
---
 celery/canvas.py             |  9 ++++++
 t/integration/test_canvas.py | 59 ++++++++++++++++++++++++++++++
 t/unit/tasks/test_canvas.py  | 30 ++++++++++++++++++
 3 files changed, 98 insertions(+)

diff --git a/celery/canvas.py b/celery/canvas.py
index 909962c1639..70c7b139212 100644
--- a/celery/canvas.py
+++ b/celery/canvas.py
@@ -958,6 +958,8 @@ def __or__(self, other):
         if isinstance(other, group):
             # unroll group with one member
             other = maybe_unroll_group(other)
+            if not isinstance(other, group):
+                return self.__or__(other)
             # chain | group() -> chain
             tasks = self.unchain_tasks()
             if not tasks:
@@ -981,6 +983,13 @@ def __or__(self, other):
             sig = self.clone()
             sig.tasks[-1] = chord(
                 sig.tasks[-1], other, app=self._app)
+            # In the scenario where the second-to-last item in a chain is a
+            # chord, chaining has just produced two consecutive chords. In
+            # such cases we can upgrade further, by chaining the body of the
+            # second-to-last chord with the last chord.
+            if len(sig.tasks) > 1 and isinstance(sig.tasks[-2], chord):
+                sig.tasks[-2].body = sig.tasks[-2].body | sig.tasks[-1]
+                sig.tasks = sig.tasks[:-1]
             return sig
         elif self.tasks and isinstance(self.tasks[-1], chord):
             # CHAIN [last item is chord] -> chain with chord body.
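To make the effect of this hunk concrete, here is a condensed, unbound
restatement of the unit test added at the end of this patch; the task names
are placeholders resolved by name and are not part of the patch itself.

.. code-block:: python

    from celery import chain, group, signature
    from celery.canvas import _chain, chord

    c = chain(
        group(signature(f'h{i}') for i in range(2)),
        group(signature(f'm{i}') for i in range(3)),
        group(signature(f't{i}') for i in range(4)),
        signature('done'),
    )
    assert isinstance(c, _chain)
    # Chaining the groups used to leave two consecutive chords in c.tasks;
    # they are now merged into a single chord whose nested body ends with
    # the 'done' signature.
    assert len(c.tasks) == 1
    assert isinstance(c.tasks[0], chord)

The merge avoids the duplicate execution of grouped tasks reported in issue
#8662, which the integration test below guards against.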
diff --git a/t/integration/test_canvas.py b/t/integration/test_canvas.py index 7c78a98148b..45cd24f6949 100644 --- a/t/integration/test_canvas.py +++ b/t/integration/test_canvas.py @@ -1037,6 +1037,65 @@ def test_freezing_chain_sets_id_of_last_task(self, manager): c.freeze(last_task.id) assert c.id == last_task.id + @pytest.mark.parametrize( + "group_last_task", + [False, True], + ) + def test_chaining_upgraded_chords_mixed_canvas_protocol_2( + self, manager, subtests, group_last_task): + """ This test is built to reproduce the github issue https://github.com/celery/celery/issues/8662 + + The issue describes a canvas where a chain of groups are executed multiple times instead of once. + This test is built to reproduce the issue and to verify that the issue is fixed. + """ + try: + manager.app.backend.ensure_chords_allowed() + except NotImplementedError as e: + raise pytest.skip(e.args[0]) + + if not manager.app.conf.result_backend.startswith('redis'): + raise pytest.skip('Requires redis result backend.') + + redis_connection = get_redis_connection() + redis_key = 'echo_chamber' + + c = chain( + group([ + redis_echo.si('1', redis_key=redis_key), + redis_echo.si('2', redis_key=redis_key) + ]), + group([ + redis_echo.si('3', redis_key=redis_key), + redis_echo.si('4', redis_key=redis_key), + redis_echo.si('5', redis_key=redis_key) + ]), + group([ + redis_echo.si('6', redis_key=redis_key), + redis_echo.si('7', redis_key=redis_key), + redis_echo.si('8', redis_key=redis_key), + redis_echo.si('9', redis_key=redis_key) + ]), + redis_echo.si('Done', redis_key='Done') if not group_last_task else + group(redis_echo.si('Done', redis_key='Done')), + ) + + with subtests.test(msg='Run the chain and wait for completion'): + redis_connection.delete(redis_key, 'Done') + c.delay().get(timeout=TIMEOUT) + await_redis_list_message_length(1, redis_key='Done', timeout=10) + + with subtests.test(msg='All tasks are executed once'): + actual = [ + sig.decode('utf-8') + for sig in redis_connection.lrange(redis_key, 0, -1) + ] + expected = [str(i) for i in range(1, 10)] + with subtests.test(msg='All tasks are executed once'): + assert sorted(actual) == sorted(expected) + + # Cleanup + redis_connection.delete(redis_key, 'Done') + class test_result_set: diff --git a/t/unit/tasks/test_canvas.py b/t/unit/tasks/test_canvas.py index 5bed3d8ec51..b4d03a56e3c 100644 --- a/t/unit/tasks/test_canvas.py +++ b/t/unit/tasks/test_canvas.py @@ -571,6 +571,36 @@ def test_chain_of_chord_upgrade_on_chaining(self): assert isinstance(new_chain, _chain) assert isinstance(new_chain.tasks[0].body, chord) + @pytest.mark.parametrize( + "group_last_task", + [False, True], + ) + def test_chain_of_chord_upgrade_on_chaining__protocol_2( + self, group_last_task): + c = chain( + group([self.add.s(i, i) for i in range(5)], app=self.app), + group([self.add.s(i, i) for i in range(10, 15)], app=self.app), + group([self.add.s(i, i) for i in range(20, 25)], app=self.app), + self.add.s(30) if not group_last_task else group(self.add.s(30), + app=self.app)) + assert isinstance(c, _chain) + assert len( + c.tasks + ) == 1, "Consecutive chords should be further upgraded to a single chord." 
+ assert isinstance(c.tasks[0], chord) + + def test_chain_of_chord_upgrade_on_chaining__protocol_3(self): + c = chain( + chain([self.add.s(i, i) for i in range(5)]), + group([self.add.s(i, i) for i in range(10, 15)], app=self.app), + chord([signature('header')], signature('body'), app=self.app), + group([self.add.s(i, i) for i in range(20, 25)], app=self.app)) + assert isinstance(c, _chain) + assert isinstance( + c.tasks[-1], chord + ), "Chord followed by a group should be upgraded to a single chord with chained body." + assert len(c.tasks) == 6 + def test_apply_options(self): class static(Signature): From 582e169b9d1750fd416cfa94ecfd26f43568f7b2 Mon Sep 17 00:00:00 2001 From: Thorben Dahl Date: Wed, 28 Feb 2024 15:50:07 +0100 Subject: [PATCH 0663/1051] Make custom remote control commands available in CLI (#8489) * Make custom remote control commands available in CLI * fixup (remove accidentally commited todo comments) * Avoid breaking test_worker by modifying os.environ * Reset global state after each preload test --- celery/bin/control.py | 89 +++++++++++++++++++------ t/unit/app/test_preload_cli.py | 69 +++++++++---------- t/unit/bin/proj/app_with_custom_cmds.py | 24 +++++++ t/unit/bin/test_control.py | 82 +++++++++++++++++++++++ 4 files changed, 206 insertions(+), 58 deletions(-) create mode 100644 t/unit/bin/proj/app_with_custom_cmds.py create mode 100644 t/unit/bin/test_control.py diff --git a/celery/bin/control.py b/celery/bin/control.py index f7bba96ddf0..38a917ea0f2 100644 --- a/celery/bin/control.py +++ b/celery/bin/control.py @@ -1,5 +1,6 @@ """The ``celery control``, ``. inspect`` and ``. status`` programs.""" from functools import partial +from typing import Literal import click from kombu.utils.json import dumps @@ -39,18 +40,69 @@ def _consume_arguments(meta, method, args): args[:] = args[i:] -def _compile_arguments(action, args): - meta = Panel.meta[action] +def _compile_arguments(command, args): + meta = Panel.meta[command] arguments = {} if meta.args: arguments.update({ - k: v for k, v in _consume_arguments(meta, action, args) + k: v for k, v in _consume_arguments(meta, command, args) }) if meta.variadic: arguments.update({meta.variadic: args}) return arguments +_RemoteControlType = Literal['inspect', 'control'] + + +def _verify_command_name(type_: _RemoteControlType, command: str) -> None: + choices = _get_commands_of_type(type_) + + if command not in choices: + command_listing = ", ".join(choices) + raise click.UsageError( + message=f'Command {command} not recognized. 
Available {type_} commands: {command_listing}', + ) + + +def _list_option(type_: _RemoteControlType): + def callback(ctx: click.Context, param, value) -> None: + if not value: + return + choices = _get_commands_of_type(type_) + + formatter = click.HelpFormatter() + + with formatter.section(f'{type_.capitalize()} Commands'): + command_list = [] + for command_name, info in choices.items(): + if info.signature: + command_preview = f'{command_name} {info.signature}' + else: + command_preview = command_name + command_list.append((command_preview, info.help)) + formatter.write_dl(command_list) + ctx.obj.echo(formatter.getvalue(), nl=False) + ctx.exit() + + return click.option( + '--list', + is_flag=True, + help=f'List available {type_} commands and exit.', + expose_value=False, + is_eager=True, + callback=callback, + ) + + +def _get_commands_of_type(type_: _RemoteControlType) -> dict: + command_name_info_pairs = [ + (name, info) for name, info in Panel.meta.items() + if info.type == type_ and info.visible + ] + return dict(sorted(command_name_info_pairs)) + + @click.command(cls=CeleryCommand) @click.option('-t', '--timeout', @@ -96,10 +148,8 @@ def status(ctx, timeout, destination, json, **kwargs): @click.command(cls=CeleryCommand, context_settings={'allow_extra_args': True}) -@click.argument("action", type=click.Choice([ - name for name, info in Panel.meta.items() - if info.type == 'inspect' and info.visible -])) +@click.argument('command') +@_list_option('inspect') @click.option('-t', '--timeout', cls=CeleryOption, @@ -121,19 +171,19 @@ def status(ctx, timeout, destination, json, **kwargs): help='Use json as output format.') @click.pass_context @handle_preload_options -def inspect(ctx, action, timeout, destination, json, **kwargs): - """Inspect the worker at runtime. +def inspect(ctx, command, timeout, destination, json, **kwargs): + """Inspect the workers by sending them the COMMAND inspect command. Availability: RabbitMQ (AMQP) and Redis transports. """ + _verify_command_name('inspect', command) callback = None if json else partial(_say_remote_command_reply, ctx, show_reply=True) - arguments = _compile_arguments(action, ctx.args) + arguments = _compile_arguments(command, ctx.args) inspect = ctx.obj.app.control.inspect(timeout=timeout, destination=destination, callback=callback) - replies = inspect._request(action, - **arguments) + replies = inspect._request(command, **arguments) if not replies: raise CeleryCommandException( @@ -153,10 +203,8 @@ def inspect(ctx, action, timeout, destination, json, **kwargs): @click.command(cls=CeleryCommand, context_settings={'allow_extra_args': True}) -@click.argument("action", type=click.Choice([ - name for name, info in Panel.meta.items() - if info.type == 'control' and info.visible -])) +@click.argument('command') +@_list_option('control') @click.option('-t', '--timeout', cls=CeleryOption, @@ -178,16 +226,17 @@ def inspect(ctx, action, timeout, destination, json, **kwargs): help='Use json as output format.') @click.pass_context @handle_preload_options -def control(ctx, action, timeout, destination, json): - """Workers remote control. +def control(ctx, command, timeout, destination, json): + """Send the COMMAND control command to the workers. Availability: RabbitMQ (AMQP), Redis, and MongoDB transports. 
""" + _verify_command_name('control', command) callback = None if json else partial(_say_remote_command_reply, ctx, show_reply=True) args = ctx.args - arguments = _compile_arguments(action, args) - replies = ctx.obj.app.control.broadcast(action, timeout=timeout, + arguments = _compile_arguments(command, args) + replies = ctx.obj.app.control.broadcast(command, timeout=timeout, destination=destination, callback=callback, reply=True, diff --git a/t/unit/app/test_preload_cli.py b/t/unit/app/test_preload_cli.py index a2241a1400d..9932f5b88d4 100644 --- a/t/unit/app/test_preload_cli.py +++ b/t/unit/app/test_preload_cli.py @@ -1,34 +1,41 @@ +import contextlib +from typing import Tuple +from unittest.mock import patch + +import pytest from click.testing import CliRunner from celery.bin.celery import celery -def test_preload_options(isolated_cli_runner: CliRunner): - # Verify commands like shell and purge can accept preload options. - # Projects like Pyramid-Celery's ini option should be valid preload - # options. - - # TODO: Find a way to run these separate invoke and assertions - # such that order does not matter. Currently, running - # the "t.unit.bin.proj.pyramid_celery_app" first seems - # to result in cache or memoization of the option. - # As a result, the expected exception is not raised when - # the invoke on "t.unit.bin.proj.app" is run as a second - # call. +@pytest.fixture(autouse=True) +def reset_command_params_between_each_test(): + with contextlib.ExitStack() as stack: + for command in celery.commands.values(): + # We only need shallow copy -- preload options are appended to the list, + # existing options are kept as-is + params_copy = command.params[:] + patch_instance = patch.object(command, "params", params_copy) + stack.enter_context(patch_instance) - res_without_preload = isolated_cli_runner.invoke( - celery, - ["-A", "t.unit.bin.proj.app", "purge", "-f", "--ini", "some_ini.ini"], - catch_exceptions=True, - ) + yield - assert "No such option: --ini" in res_without_preload.stdout - assert res_without_preload.exit_code == 2 +@pytest.mark.parametrize( + "subcommand_with_params", + [ + ("purge", "-f"), + ("shell",), + ] +) +def test_preload_options(subcommand_with_params: Tuple[str, ...], isolated_cli_runner: CliRunner): + # Verify commands like shell and purge can accept preload options. + # Projects like Pyramid-Celery's ini option should be valid preload + # options. 
res_without_preload = isolated_cli_runner.invoke( celery, - ["-A", "t.unit.bin.proj.app", "shell", "--ini", "some_ini.ini"], - catch_exceptions=True, + ["-A", "t.unit.bin.proj.app", *subcommand_with_params, "--ini", "some_ini.ini"], + catch_exceptions=False, ) assert "No such option: --ini" in res_without_preload.stdout @@ -39,25 +46,11 @@ def test_preload_options(isolated_cli_runner: CliRunner): [ "-A", "t.unit.bin.proj.pyramid_celery_app", - "purge", - "-f", + *subcommand_with_params, "--ini", "some_ini.ini", ], - catch_exceptions=True, + catch_exceptions=False, ) - assert res_with_preload.exit_code == 0 - - res_with_preload = isolated_cli_runner.invoke( - celery, - [ - "-A", - "t.unit.bin.proj.pyramid_celery_app", - "shell", - "--ini", - "some_ini.ini", - ], - catch_exceptions=True, - ) - assert res_with_preload.exit_code == 0 + assert res_with_preload.exit_code == 0, res_with_preload.stdout diff --git a/t/unit/bin/proj/app_with_custom_cmds.py b/t/unit/bin/proj/app_with_custom_cmds.py new file mode 100644 index 00000000000..db96b99e700 --- /dev/null +++ b/t/unit/bin/proj/app_with_custom_cmds.py @@ -0,0 +1,24 @@ +from celery import Celery +from celery.worker.control import control_command, inspect_command + + +@control_command( + args=[('a', int), ('b', int)], + signature='a b', +) +def custom_control_cmd(state, a, b): + """Ask the workers to reply with a and b.""" + return {'ok': f'Received {a} and {b}'} + + +@inspect_command( + args=[('x', int)], + signature='x', +) +def custom_inspect_cmd(state, x): + """Ask the workers to reply with x.""" + return {'ok': f'Received {x}'} + + +app = Celery(set_as_current=False) +app.config_from_object('t.integration.test_worker_config') diff --git a/t/unit/bin/test_control.py b/t/unit/bin/test_control.py new file mode 100644 index 00000000000..6d3704e9dc2 --- /dev/null +++ b/t/unit/bin/test_control.py @@ -0,0 +1,82 @@ +import os +import re +from unittest.mock import patch + +import pytest +from click.testing import CliRunner + +from celery.bin.celery import celery +from celery.platforms import EX_UNAVAILABLE + +_GLOBAL_OPTIONS = ['-A', 't.unit.bin.proj.app_with_custom_cmds', '--broker', 'memory://'] +_INSPECT_OPTIONS = ['--timeout', '0'] # Avoid waiting for the zero workers to reply + + +@pytest.fixture(autouse=True) +def clean_os_environ(): + # Celery modifies os.environ when given the CLI option --broker memory:// + # This interferes with other tests, so we need to reset os.environ + with patch.dict(os.environ, clear=True): + yield + + +@pytest.mark.parametrize( + ('celery_cmd', 'custom_cmd'), + [ + ('inspect', ('custom_inspect_cmd', '123')), + ('control', ('custom_control_cmd', '123', '456')), + ], +) +def test_custom_remote_command(celery_cmd, custom_cmd, isolated_cli_runner: CliRunner): + res = isolated_cli_runner.invoke( + celery, + [*_GLOBAL_OPTIONS, celery_cmd, *_INSPECT_OPTIONS, *custom_cmd], + catch_exceptions=False, + ) + assert res.exit_code == EX_UNAVAILABLE, (res, res.stdout) + assert res.stdout.strip() == 'Error: No nodes replied within time constraint' + + +@pytest.mark.parametrize( + ('celery_cmd', 'remote_cmd'), + [ + # Test nonexistent commands + ('inspect', 'this_command_does_not_exist'), + ('control', 'this_command_does_not_exist'), + # Test commands that exist, but are of the wrong type + ('inspect', 'custom_control_cmd'), + ('control', 'custom_inspect_cmd'), + ], +) +def test_unrecognized_remote_command(celery_cmd, remote_cmd, isolated_cli_runner: CliRunner): + res = isolated_cli_runner.invoke( + celery, + [*_GLOBAL_OPTIONS, 
celery_cmd, *_INSPECT_OPTIONS, remote_cmd], + catch_exceptions=False, + ) + assert res.exit_code == 2, (res, res.stdout) + assert f'Error: Command {remote_cmd} not recognized. Available {celery_cmd} commands: ' in res.stdout + + +_expected_inspect_regex = ( + '\n custom_inspect_cmd x\\s+Ask the workers to reply with x\\.\n' +) +_expected_control_regex = ( + '\n custom_control_cmd a b\\s+Ask the workers to reply with a and b\\.\n' +) + + +@pytest.mark.parametrize( + ('celery_cmd', 'expected_regex'), + [ + ('inspect', re.compile(_expected_inspect_regex, re.MULTILINE)), + ('control', re.compile(_expected_control_regex, re.MULTILINE)), + ], +) +def test_listing_remote_commands(celery_cmd, expected_regex, isolated_cli_runner: CliRunner): + res = isolated_cli_runner.invoke( + celery, + [*_GLOBAL_OPTIONS, celery_cmd, '--list'], + ) + assert res.exit_code == 0, (res, res.stdout) + assert expected_regex.search(res.stdout) From 06e91d913c424ddb862d9a5b50a5d3da0acdd217 Mon Sep 17 00:00:00 2001 From: Haim Daniel <64732931+haimjether@users.noreply.github.com> Date: Wed, 28 Feb 2024 16:59:18 +0200 Subject: [PATCH 0664/1051] Add Google Cloud Storage (GCS) backend (#8868) * Add Google Cloud Storage (GCS) backend * Add extra google-cloud-storage requirements * Add gcs backend module * Add gcs backend to userguide configuration * Add gcs to backends in README * Add gcs app Namespace * Add configuration documentation * isort * Cosmetic: fix documentation * Add tests coverage for .client() method * Add tests coverage for missing storage import * Add tests coverage for parse_url() * Documentation: remove incorrect configuration param. * Remove unused options --- README.rst | 3 + celery/app/backends.py | 1 + celery/app/defaults.py | 6 + celery/backends/gcs.py | 141 +++++++++++++++ docs/getting-started/introduction.rst | 1 + docs/includes/installation.txt | 4 + .../reference/celery.backends.gcs.rst | 11 ++ docs/internals/reference/index.rst | 1 + docs/userguide/configuration.rst | 99 ++++++++++- requirements/extras/gcs.txt | 1 + requirements/test.txt | 1 + setup.py | 1 + t/unit/backends/test_gcs.py | 162 ++++++++++++++++++ 13 files changed, 430 insertions(+), 2 deletions(-) create mode 100644 celery/backends/gcs.py create mode 100644 docs/internals/reference/celery.backends.gcs.rst create mode 100644 requirements/extras/gcs.txt create mode 100644 t/unit/backends/test_gcs.py diff --git a/README.rst b/README.rst index e206ec30140..28a5dbcc3e4 100644 --- a/README.rst +++ b/README.rst @@ -317,6 +317,9 @@ Transports and Backends :``celery[s3]``: for using S3 Storage as a result backend. +:``celery[gcs]``: + for using Google Cloud Storage as a result backend. + :``celery[couchbase]``: for using Couchbase as a result backend. 
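Before diving into the diff, a minimal usage sketch may help; the bucket and
project names are placeholders, and the settings used are the ones introduced
by this patch (documented in the configuration hunk further below).

.. code-block:: python

    from celery import Celery

    app = Celery('tasks', broker='redis://localhost:6379/0')
    # 'gs://' resolves to GCSBackend through the alias added to
    # BACKEND_ALIASES below; the bucket, key prefix, and gcs_project
    # query parameter are merged into the app configuration by the backend.
    app.conf.result_backend = 'gs://mybucket/celery-results?gcs_project=myproject'

Setting ``gcs_ttl`` additionally requires a "Delete" lifecycle rule on the
bucket; the backend raises :exc:`ImproperlyConfigured` without one, as
enforced by ``_is_bucket_lifecycle_rule_exists`` below.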
diff --git a/celery/app/backends.py b/celery/app/backends.py index 5481528f0c8..a274b8554b4 100644 --- a/celery/app/backends.py +++ b/celery/app/backends.py @@ -34,6 +34,7 @@ 'azureblockblob': 'celery.backends.azureblockblob:AzureBlockBlobBackend', 'arangodb': 'celery.backends.arangodb:ArangoDbBackend', 's3': 'celery.backends.s3:S3Backend', + 'gs': 'celery.backends.gcs:GCSBackend', } diff --git a/celery/app/defaults.py b/celery/app/defaults.py index 2d357134126..523b56d72f6 100644 --- a/celery/app/defaults.py +++ b/celery/app/defaults.py @@ -140,6 +140,12 @@ def __repr__(self): connection_timeout=Option(20, type='int'), read_timeout=Option(120, type='int'), ), + gcs=Namespace( + bucket=Option(type='string'), + project=Option(type='string'), + base_path=Option('', type='string'), + ttl=Option(0, type='float'), + ), control=Namespace( queue_ttl=Option(300.0, type='float'), queue_expires=Option(10.0, type='float'), diff --git a/celery/backends/gcs.py b/celery/backends/gcs.py new file mode 100644 index 00000000000..c57c2e44960 --- /dev/null +++ b/celery/backends/gcs.py @@ -0,0 +1,141 @@ +"""Google Cloud Storage result store backend for Celery.""" +from concurrent.futures import ThreadPoolExecutor +from datetime import datetime, timedelta +from os import getpid +from threading import RLock + +from kombu.utils.encoding import bytes_to_str +from kombu.utils.functional import dictfilter +from kombu.utils.url import url_to_parts + +from celery.exceptions import ImproperlyConfigured + +from .base import KeyValueStoreBackend + +try: + import requests + from google.cloud import storage + from google.cloud.storage import Client + from google.cloud.storage.retry import DEFAULT_RETRY +except ImportError: + storage = None + +__all__ = ('GCSBackend',) + + +class GCSBackend(KeyValueStoreBackend): + """Google Cloud Storage task result backend.""" + + def __init__(self, **kwargs): + super().__init__(**kwargs) + self._lock = RLock() + self._pid = getpid() + self._retry_policy = DEFAULT_RETRY + self._client = None + + if not storage: + raise ImproperlyConfigured( + 'You must install google-cloud-storage to use gcs backend' + ) + conf = self.app.conf + if self.url: + url_params = self._params_from_url() + conf.update(**dictfilter(url_params)) + + self.bucket_name = conf.get('gcs_bucket') + if not self.bucket_name: + raise ImproperlyConfigured( + 'Missing bucket name: specify gcs_bucket to use gcs backend' + ) + self.project = conf.get('gcs_project') + if not self.project: + raise ImproperlyConfigured( + 'Missing project:specify gcs_project to use gcs backend' + ) + self.base_path = conf.get('gcs_base_path', '').strip('/') + self._threadpool_maxsize = int(conf.get('gcs_threadpool_maxsize', 10)) + self.ttl = float(conf.get('gcs_ttl') or 0) + if self.ttl < 0: + raise ImproperlyConfigured( + f'Invalid ttl: {self.ttl} must be greater than or equal to 0' + ) + elif self.ttl: + if not self._is_bucket_lifecycle_rule_exists(): + raise ImproperlyConfigured( + f'Missing lifecycle rule to use gcs backend with ttl on ' + f'bucket: {self.bucket_name}' + ) + + def get(self, key): + key = bytes_to_str(key) + blob = self._get_blob(key) + try: + return blob.download_as_bytes(retry=self._retry_policy) + except storage.blob.NotFound: + return None + + def set(self, key, value): + key = bytes_to_str(key) + blob = self._get_blob(key) + if self.ttl: + blob.custom_time = datetime.utcnow() + timedelta(seconds=self.ttl) + blob.upload_from_string(value, retry=self._retry_policy) + + def delete(self, key): + key = bytes_to_str(key) + blob 
= self._get_blob(key) + if blob.exists(): + blob.delete(retry=self._retry_policy) + + def mget(self, keys): + with ThreadPoolExecutor() as pool: + return list(pool.map(self.get, keys)) + + @property + def client(self): + """Returns a storage client.""" + + # make sure it's thread-safe, as creating a new client is expensive + with self._lock: + if self._client and self._pid == getpid(): + return self._client + # make sure each process gets its own connection after a fork + self._client = Client(project=self.project) + self._pid = getpid() + + # config the number of connections to the server + adapter = requests.adapters.HTTPAdapter( + pool_connections=self._threadpool_maxsize, + pool_maxsize=self._threadpool_maxsize, + max_retries=3, + ) + client_http = self._client._http + client_http.mount("https://", adapter) + client_http._auth_request.session.mount("https://", adapter) + + return self._client + + @property + def bucket(self): + return self.client.bucket(self.bucket_name) + + def _get_blob(self, key): + key_bucket_path = f'{self.base_path}/{key}' if self.base_path else key + return self.bucket.blob(key_bucket_path) + + def _is_bucket_lifecycle_rule_exists(self): + bucket = self.bucket + bucket.reload() + for rule in bucket.lifecycle_rules: + if rule['action']['type'] == 'Delete': + return True + return False + + def _params_from_url(self): + url_parts = url_to_parts(self.url) + + return { + 'gcs_bucket': url_parts.hostname, + 'gcs_base_path': url_parts.path, + **url_parts.query, + } diff --git a/docs/getting-started/introduction.rst b/docs/getting-started/introduction.rst index 18c672eb71a..3db4f3aebce 100644 --- a/docs/getting-started/introduction.rst +++ b/docs/getting-started/introduction.rst @@ -151,6 +151,7 @@ Celery is… - MongoDB, CouchDB, Couchbase, ArangoDB - Amazon DynamoDB, Amazon S3 - Microsoft Azure Block Blob, Microsoft Azure Cosmos DB + - Google Cloud Storage - File system - **Serialization** diff --git a/docs/includes/installation.txt b/docs/includes/installation.txt index ae79e63292d..7422f16fc65 100644 --- a/docs/includes/installation.txt +++ b/docs/includes/installation.txt @@ -115,6 +115,10 @@ Transports and Backends You should probably not use this in your requirements, it's here for informational purposes only. +:``celery[gcs]``: + for using the Google Cloud Storage as a result backend (*experimental*). + + .. _celery-installing-from-source: diff --git a/docs/internals/reference/celery.backends.gcs.rst b/docs/internals/reference/celery.backends.gcs.rst new file mode 100644 index 00000000000..cac257679d4 --- /dev/null +++ b/docs/internals/reference/celery.backends.gcs.rst @@ -0,0 +1,11 @@ +========================================== + ``celery.backends.gcs`` +========================================== + +.. contents:: + :local: +.. currentmodule:: celery.backends.gcs + +.. 
automodule:: celery.backends.gcs
+    :members:
+    :undoc-members:
diff --git a/docs/internals/reference/index.rst b/docs/internals/reference/index.rst
index cd587b8ae76..483ea193444 100644
--- a/docs/internals/reference/index.rst
+++ b/docs/internals/reference/index.rst
@@ -40,6 +40,7 @@
     celery.backends.filesystem
     celery.backends.cosmosdbsql
     celery.backends.s3
+    celery.backends.gcs
     celery.app.trace
     celery.app.annotations
     celery.app.routes
diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst
index 2825c58434a..00893d4e230 100644
--- a/docs/userguide/configuration.rst
+++ b/docs/userguide/configuration.rst
@@ -731,6 +731,10 @@ Can be one of the following:
     Use the `S3`_ to store the results
     See :ref:`conf-s3-result-backend`.

+* ``gcs``
+    Use the `GCS`_ to store the results
+    See :ref:`conf-gcs-result-backend`.
+
 .. warning:

     While the AMQP result backend is very efficient, you must make sure
@@ -750,6 +754,7 @@ Can be one of the following:
 .. _`Consul`: https://consul.io/
 .. _`AzureBlockBlob`: https://azure.microsoft.com/en-us/services/storage/blobs/
 .. _`S3`: https://aws.amazon.com/s3/
+.. _`GCS`: https://cloud.google.com/storage/

 .. setting:: result_backend_always_retry
@@ -1798,6 +1803,96 @@ Default: 120.

 Timeout in seconds for reading of an azure block blob.

+.. _conf-gcs-result-backend:
+
+GCS backend settings
+--------------------
+
+.. note::
+
+    This gcs backend driver requires :pypi:`google-cloud-storage`.
+
+    To install, use the ``gcs`` bundle:
+
+    .. code-block:: console
+
+        $ pip install celery[gcs]
+
+    See :ref:`bundles` for information on combining multiple extension
+    requirements.
+
+GCS can be configured via the URL provided in :setting:`result_backend`, for example::
+
+    result_backend = 'gcs://mybucket/some-prefix?project=myproject&ttl=600'
+
+This backend requires the following configuration directives to be set:
+
+.. setting:: gcs_bucket
+
+``gcs_bucket``
+~~~~~~~~~~~~~~
+
+Default: None.
+
+The gcs bucket name. For example::
+
+    gcs_bucket = 'bucket_name'
+
+.. setting:: gcs_project
+
+``gcs_project``
+~~~~~~~~~~~~~~~
+
+Default: None.
+
+The gcs project name. For example::
+
+    gcs_project = 'test-project'
+
+.. setting:: gcs_base_path
+
+``gcs_base_path``
+~~~~~~~~~~~~~~~~~
+
+Default: None.
+
+A base path in the gcs bucket used to store all result keys. For example::
+
+    gcs_base_path = '/prefix'
+
+``gcs_ttl``
+~~~~~~~~~~~
+
+Default: 0.
+
+The time to live in seconds for the result blobs.
+Requires a GCS bucket with "Delete" Object Lifecycle Management action enabled.
+Use it to automatically delete results from Cloud Storage Buckets.
+
+For example, to automatically remove results after 24 hours::
+
+    gcs_ttl = 86400
+
+``gcs_threadpool_maxsize``
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Default: 10.
+
+Threadpool size for GCS operations. The same value defines the connection pool size.
+This allows you to control the number of concurrent operations. For example::
+
+    gcs_threadpool_maxsize = 20
+
+Example configuration
+~~~~~~~~~~~~~~~~~~~~~
+
+.. code-block:: python
+
+    gcs_bucket = 'mybucket'
+    gcs_project = 'myproject'
+    gcs_base_path = '/celery_result_backend'
+    gcs_ttl = 86400
+
 .. _conf-elasticsearch-result-backend:

 Elasticsearch backend settings
@@ -2821,7 +2916,7 @@ to the AMQP broker.

 If this is set to :const:`None`, we'll retry forever.

 ``broker_channel_error_retry``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

 .. versionadded:: 5.3

@@ -2984,7 +3079,7 @@ prefetch count to its maximum allowable value following a connection loss
 to the broker.
By default, this setting is enabled. Upon a connection loss, Celery will attempt to reconnect to the broker automatically, -provided the :setting:`broker_connection_retry_on_startup` or :setting:`broker_connection_retry` +provided the :setting:`broker_connection_retry_on_startup` or :setting:`broker_connection_retry` is not set to False. During the period of lost connection, the message broker does not keep track of the number of tasks already fetched. Therefore, to manage the task load effectively and prevent overloading, Celery reduces the prefetch count based on the number of tasks that are diff --git a/requirements/extras/gcs.txt b/requirements/extras/gcs.txt new file mode 100644 index 00000000000..7f34beca1b6 --- /dev/null +++ b/requirements/extras/gcs.txt @@ -0,0 +1 @@ +google-cloud-storage>=2.10.0 diff --git a/requirements/test.txt b/requirements/test.txt index 531c44e209c..16a1c5311c8 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -14,3 +14,4 @@ pre-commit>=3.6.1; python_version >= '3.9' -r extras/yaml.txt -r extras/msgpack.txt -r extras/mongodb.txt +-r extras/gcs.txt diff --git a/setup.py b/setup.py index 6ffcdeb1c3a..aef46a1a15f 100755 --- a/setup.py +++ b/setup.py @@ -25,6 +25,7 @@ 'elasticsearch', 'eventlet', 'gevent', + 'gcs', 'librabbitmq', 'memcache', 'mongodb', diff --git a/t/unit/backends/test_gcs.py b/t/unit/backends/test_gcs.py new file mode 100644 index 00000000000..c9ca167c22f --- /dev/null +++ b/t/unit/backends/test_gcs.py @@ -0,0 +1,162 @@ +from datetime import datetime +from unittest.mock import Mock, call, patch + +import pytest +from google.cloud.exceptions import NotFound + +from celery.backends.gcs import GCSBackend +from celery.exceptions import ImproperlyConfigured + + +class test_GCSBackend: + def setup_method(self): + self.app.conf.gcs_bucket = 'bucket' + self.app.conf.gcs_project = 'project' + + @pytest.fixture(params=['', 'test_folder/']) + def base_path(self, request): + return request.param + + @pytest.fixture(params=[86400, None]) + def ttl(self, request): + return request.param + + def test_missing_storage_module(self): + with patch('celery.backends.gcs.storage', None): + with pytest.raises(ImproperlyConfigured, match='You must install'): + GCSBackend(app=self.app) + + def test_missing_bucket(self): + self.app.conf.gcs_bucket = None + + with pytest.raises(ImproperlyConfigured, match='Missing bucket name'): + GCSBackend(app=self.app) + + def test_missing_project(self): + self.app.conf.gcs_project = None + + with pytest.raises(ImproperlyConfigured, match='Missing project'): + GCSBackend(app=self.app) + + def test_invalid_ttl(self): + self.app.conf.gcs_bucket = 'bucket' + self.app.conf.gcs_project = 'project' + self.app.conf.gcs_ttl = -1 + + with pytest.raises(ImproperlyConfigured, match='Invalid ttl'): + GCSBackend(app=self.app) + + def test_parse_url(self, base_path): + self.app.conf.gcs_bucket = None + self.app.conf.gcs_project = None + + backend = GCSBackend( + app=self.app, url=f'gcs://bucket/{base_path}?gcs_project=project' + ) + assert backend.bucket_name == 'bucket' + assert backend.base_path == base_path.strip('/') + + @patch.object(GCSBackend, '_is_bucket_lifecycle_rule_exists') + def test_ttl_missing_lifecycle_rule(self, mock_lifecycle): + self.app.conf.gcs_ttl = 86400 + + mock_lifecycle.return_value = False + with pytest.raises( + ImproperlyConfigured, match='Missing lifecycle rule' + ): + GCSBackend(app=self.app) + mock_lifecycle.assert_called_once() + + @patch.object(GCSBackend, '_get_blob') + def test_get_key(self, 
mock_get_blob, base_path): + self.app.conf.gcs_base_path = base_path + + mock_blob = Mock() + mock_get_blob.return_value = mock_blob + backend = GCSBackend(app=self.app) + backend.get(b"testkey1") + + mock_get_blob.assert_called_once_with('testkey1') + mock_blob.download_as_bytes.assert_called_once() + + @patch.object(GCSBackend, 'bucket') + @patch.object(GCSBackend, '_get_blob') + def test_set_key(self, mock_get_blob, mock_bucket_prop, base_path, ttl): + self.app.conf.gcs_base_path = base_path + self.app.conf.gcs_ttl = ttl + + mock_blob = Mock() + mock_get_blob.return_value = mock_blob + mock_bucket_prop.lifecycle_rules = [{'action': {'type': 'Delete'}}] + backend = GCSBackend(app=self.app) + backend.set('testkey', 'testvalue') + mock_get_blob.assert_called_once_with('testkey') + mock_blob.upload_from_string.assert_called_once_with( + 'testvalue', retry=backend._retry_policy + ) + if ttl: + assert mock_blob.custom_time >= datetime.utcnow() + + @patch.object(GCSBackend, '_get_blob') + def test_get_missing_key(self, mock_get_blob): + self.app.conf.gcs_bucket = 'bucket' + self.app.conf.gcs_project = 'project' + + mock_blob = Mock() + mock_get_blob.return_value = mock_blob + + mock_blob.download_as_bytes.side_effect = NotFound('not found') + gcs_backend = GCSBackend(app=self.app) + result = gcs_backend.get('some-key') + + assert result is None + + @patch.object(GCSBackend, '_get_blob') + def test_delete_existing_key(self, mock_get_blob, base_path): + self.app.conf.gcs_base_path = base_path + + mock_blob = Mock() + mock_get_blob.return_value = mock_blob + mock_blob.exists.return_value = True + backend = GCSBackend(app=self.app) + backend.delete(b"testkey2") + + mock_get_blob.assert_called_once_with('testkey2') + mock_blob.exists.assert_called_once() + mock_blob.delete.assert_called_once() + + @patch.object(GCSBackend, '_get_blob') + def test_delete_missing_key(self, mock_get_blob, base_path): + self.app.conf.gcs_base_path = base_path + + mock_blob = Mock() + mock_get_blob.return_value = mock_blob + mock_blob.exists.return_value = False + backend = GCSBackend(app=self.app) + backend.delete(b"testkey2") + + mock_get_blob.assert_called_once_with('testkey2') + mock_blob.exists.assert_called_once() + mock_blob.delete.assert_not_called() + + @patch.object(GCSBackend, 'get') + def test_mget(self, mock_get, base_path): + self.app.conf.gcs_base_path = base_path + backend = GCSBackend(app=self.app) + mock_get.side_effect = ['value1', 'value2'] + result = backend.mget([b'key1', b'key2']) + mock_get.assert_has_calls([call(b'key1'), call(b'key2')]) + assert result == ['value1', 'value2'] + + @patch('celery.backends.gcs.Client') + @patch('celery.backends.gcs.getpid') + def test_new_client_after_fork(self, mock_pid, mock_client): + mock_pid.return_value = 123 + backend = GCSBackend(app=self.app) + client1 = backend.client + mock_pid.assert_called() + mock_client.assert_called() + mock_pid.return_value = 456 + mock_client.return_value = Mock() + assert client1 != backend.client + mock_client.assert_called_with(project='project') From 5fbb79e881c489852614a0cc0d064cd032cb4b9d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 2 Mar 2024 10:42:55 +0200 Subject: [PATCH 0665/1051] Bump msgpack from 1.0.7 to 1.0.8 (#8885) Bumps [msgpack](https://github.com/msgpack/msgpack-python) from 1.0.7 to 1.0.8. 
- [Release notes](https://github.com/msgpack/msgpack-python/releases) - [Changelog](https://github.com/msgpack/msgpack-python/blob/main/ChangeLog.rst) - [Commits](https://github.com/msgpack/msgpack-python/compare/v1.0.7...v1.0.8) --- updated-dependencies: - dependency-name: msgpack dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/extras/msgpack.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/msgpack.txt b/requirements/extras/msgpack.txt index 990f76ab16b..82308951b89 100644 --- a/requirements/extras/msgpack.txt +++ b/requirements/extras/msgpack.txt @@ -1 +1 @@ -msgpack==1.0.7 +msgpack==1.0.8 From 9edeab6b4526a59bf699df10f1b48ac65809eaea Mon Sep 17 00:00:00 2001 From: pyup-bot Date: Mon, 4 Mar 2024 03:10:38 +0200 Subject: [PATCH 0666/1051] Update pytest from 8.0.2 to 8.1.0 --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index 16a1c5311c8..54d52f4115b 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,4 +1,4 @@ -pytest==8.0.2 +pytest==8.1.0 pytest-celery==1.0.0b2 pytest-rerunfailures==13.0.0 pytest-subtests==0.11.0 From 178c282e90cac60d2534d6b0a0128792cc9fe06f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 8 Mar 2024 11:49:50 +0200 Subject: [PATCH 0667/1051] Bump pytest-timeout from 2.2.0 to 2.3.1 (#8894) Bumps [pytest-timeout](https://github.com/pytest-dev/pytest-timeout) from 2.2.0 to 2.3.1. - [Commits](https://github.com/pytest-dev/pytest-timeout/compare/2.2.0...2.3.1) --- updated-dependencies: - dependency-name: pytest-timeout dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index 54d52f4115b..0663abc5338 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -2,7 +2,7 @@ pytest==8.1.0 pytest-celery==1.0.0b2 pytest-rerunfailures==13.0.0 pytest-subtests==0.11.0 -pytest-timeout==2.2.0 +pytest-timeout==2.3.1 pytest-click==1.1.0 pytest-order==1.2.0 boto3>=1.26.143 From 8f2698e1276b2fb8f39e4c6fb20c0865c07fa588 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 8 Mar 2024 17:07:01 +0200 Subject: [PATCH 0668/1051] Bump pytest-subtests from 0.11.0 to 0.12.1 (#8896) Bumps [pytest-subtests](https://github.com/pytest-dev/pytest-subtests) from 0.11.0 to 0.12.1. - [Changelog](https://github.com/pytest-dev/pytest-subtests/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest-subtests/compare/v0.11.0...v0.12.1) --- updated-dependencies: - dependency-name: pytest-subtests dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index 0663abc5338..373c1d245ae 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,7 +1,7 @@ pytest==8.1.0 pytest-celery==1.0.0b2 pytest-rerunfailures==13.0.0 -pytest-subtests==0.11.0 +pytest-subtests==0.12.1 pytest-timeout==2.3.1 pytest-click==1.1.0 pytest-order==1.2.0 From 1f469588c2606bf6d8ee3625a6199365d5f27ff4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 9 Mar 2024 10:52:24 +0200 Subject: [PATCH 0669/1051] Bump mypy from 1.8.0 to 1.9.0 (#8898) Bumps [mypy](https://github.com/python/mypy) from 1.8.0 to 1.9.0. - [Changelog](https://github.com/python/mypy/blob/master/CHANGELOG.md) - [Commits](https://github.com/python/mypy/compare/v1.8.0...1.9.0) --- updated-dependencies: - dependency-name: mypy dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index 373c1d245ae..dd180d5caeb 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -8,7 +8,7 @@ pytest-order==1.2.0 boto3>=1.26.143 moto>=4.1.11,<5.1.0 # typing extensions -mypy==1.8.0; platform_python_implementation=="CPython" +mypy==1.9.0; platform_python_implementation=="CPython" pre-commit>=3.5.0,<3.6.0; python_version < '3.9' pre-commit>=3.6.1; python_version >= '3.9' -r extras/yaml.txt From 79ec40abfe41b215ca41fa0d270e54842626d7d8 Mon Sep 17 00:00:00 2001 From: "pyup.io bot" Date: Sat, 9 Mar 2024 12:32:53 -0800 Subject: [PATCH 0670/1051] Update pytest from 8.1.0 to 8.1.1 (#8901) --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index dd180d5caeb..826715e9c57 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,4 +1,4 @@ -pytest==8.1.0 +pytest==8.1.1 pytest-celery==1.0.0b2 pytest-rerunfailures==13.0.0 pytest-subtests==0.12.1 From 3dada5016377e64dac246e2cc1e7091795d9733d Mon Sep 17 00:00:00 2001 From: Jeremy Hsu Date: Thu, 29 Feb 2024 01:11:25 -0500 Subject: [PATCH 0671/1051] Update upstream URL to SSH --- CONTRIBUTING.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 82d5c918a05..f3ffbbdd3af 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -423,7 +423,7 @@ to upstream changes: .. 
code-block:: console $ cd celery - $ git remote add upstream git://github.com/celery/celery.git + $ git remote add upstream git@github.com:celery/celery.git $ git fetch upstream If you need to pull in new changes from upstream you should From 62a1a50fb71ae726e0693c91b61703a171153081 Mon Sep 17 00:00:00 2001 From: Benel Tayar <86257734+beneltayar@users.noreply.github.com> Date: Sun, 10 Mar 2024 18:09:29 +0200 Subject: [PATCH 0672/1051] Fix recursive result parents on group in middle of chain (#8903) * Fix recursive result parents on group in middle of chain * Add integration test --------- Co-authored-by: Benel Tayar --- celery/canvas.py | 6 ++++++ t/integration/test_canvas.py | 12 ++++++++++++ t/unit/tasks/test_canvas.py | 6 ++++++ 3 files changed, 24 insertions(+) diff --git a/celery/canvas.py b/celery/canvas.py index 70c7b139212..cb76a218013 100644 --- a/celery/canvas.py +++ b/celery/canvas.py @@ -1225,6 +1225,12 @@ def prepare_steps(self, args, kwargs, tasks, task, body=prev_task, root_id=root_id, app=app, ) + if tasks: + prev_task = tasks[-1] + prev_res = results[-1] + else: + prev_task = None + prev_res = None if is_last_task: # chain(task_id=id) means task id is set for the last task diff --git a/t/integration/test_canvas.py b/t/integration/test_canvas.py index 45cd24f6949..bb5b80ffa67 100644 --- a/t/integration/test_canvas.py +++ b/t/integration/test_canvas.py @@ -1096,6 +1096,18 @@ def test_chaining_upgraded_chords_mixed_canvas_protocol_2( # Cleanup redis_connection.delete(redis_key, 'Done') + def test_group_in_center_of_chain(self, manager): + try: + manager.app.backend.ensure_chords_allowed() + except NotImplementedError as e: + raise pytest.skip(e.args[0]) + + t1 = chain(tsum.s(), group(add.s(8), add.s(16)), tsum.s() | add.s(32)) + t2 = chord([tsum, tsum], t1) + t3 = chord([add.s(0, 1)], t2) + res = t3.apply_async() # should not raise + assert res.get(timeout=TIMEOUT) == 60 + class test_result_set: diff --git a/t/unit/tasks/test_canvas.py b/t/unit/tasks/test_canvas.py index b4d03a56e3c..9bd4f6b75dd 100644 --- a/t/unit/tasks/test_canvas.py +++ b/t/unit/tasks/test_canvas.py @@ -819,6 +819,12 @@ def link_chain(sig): assert signature(flat_chain.tasks[1].options['link'][0]) == signature('link_b') assert signature(flat_chain.tasks[1].options['link_error'][0]) == signature('link_ab') + def test_group_in_center_of_chain(self): + t1 = chain(self.add.si(1, 1), group(self.add.si(1, 1), self.add.si(1, 1)), + self.add.si(1, 1) | self.add.si(1, 1)) + t2 = chord([self.add.si(1, 1), self.add.si(1, 1)], t1) + t2.freeze() # should not raise + class test_group(CanvasCase): def test_repr(self): From c3dac195f4728e4a23f686ba2102f78a1bbe5fc3 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 11 Mar 2024 21:53:17 +0200 Subject: [PATCH 0673/1051] Bump pytest-celery to 1.0.0b4 (#8899) * Bump pytest-celery to 1.0.0b4 * Fixed test_max_prefetch_passed_on_broker_restart * Fixed test_max_prefetch_not_passed_on_broker_restart --- requirements/extras/pytest.txt | 2 +- requirements/test.txt | 2 +- t/smoke/tests/test_consumer.py | 6 ++++++ t/smoke/workers/docker/dev | 2 +- t/smoke/workers/docker/pypi | 2 +- 5 files changed, 10 insertions(+), 4 deletions(-) diff --git a/requirements/extras/pytest.txt b/requirements/extras/pytest.txt index ed4fe4a199f..ae0cb71690e 100644 --- a/requirements/extras/pytest.txt +++ b/requirements/extras/pytest.txt @@ -1 +1 @@ -pytest-celery==1.0.0b1 +pytest-celery==1.0.0b4 diff --git a/requirements/test.txt b/requirements/test.txt index 826715e9c57..ef400111a77 100644 --- 
a/requirements/test.txt +++ b/requirements/test.txt @@ -1,5 +1,5 @@ pytest==8.1.1 -pytest-celery==1.0.0b2 +pytest-celery==1.0.0b4 pytest-rerunfailures==13.0.0 pytest-subtests==0.12.1 pytest-timeout==2.3.1 diff --git a/t/smoke/tests/test_consumer.py b/t/smoke/tests/test_consumer.py index 6448946e6fa..c070b84c31a 100644 --- a/t/smoke/tests/test_consumer.py +++ b/t/smoke/tests/test_consumer.py @@ -74,6 +74,9 @@ def default_worker_app(self, default_worker_app: Celery) -> Celery: return app def test_max_prefetch_passed_on_broker_restart(self, celery_setup: CeleryTestSetup): + if isinstance(celery_setup.broker, RedisTestBroker): + pytest.xfail("Real Bug: Broker does not fetch messages after restart") + sig = group(long_running_task.s(420) for _ in range(WORKER_CONCURRENCY)) sig.apply_async(queue=celery_setup.worker.worker_queue) celery_setup.broker.restart() @@ -92,6 +95,9 @@ def default_worker_app(self, default_worker_app: Celery) -> Celery: return app def test_max_prefetch_not_passed_on_broker_restart(self, celery_setup: CeleryTestSetup): + if isinstance(celery_setup.broker, RedisTestBroker): + pytest.xfail("Real Bug: Broker does not fetch messages after restart") + sig = group(long_running_task.s(10) for _ in range(WORKER_CONCURRENCY)) r = sig.apply_async(queue=celery_setup.worker.worker_queue) celery_setup.broker.restart() diff --git a/t/smoke/workers/docker/dev b/t/smoke/workers/docker/dev index a0619761cc8..a34370e8055 100644 --- a/t/smoke/workers/docker/dev +++ b/t/smoke/workers/docker/dev @@ -24,7 +24,7 @@ COPY --chown=test_user:test_user . /celery RUN pip install --no-cache-dir --upgrade \ pip \ -e /celery[redis,pymemcache] \ - psutil + pytest-celery==1.0.0b4 # The workdir must be /app WORKDIR /app diff --git a/t/smoke/workers/docker/pypi b/t/smoke/workers/docker/pypi index be8c5871a45..b11f95667d7 100644 --- a/t/smoke/workers/docker/pypi +++ b/t/smoke/workers/docker/pypi @@ -23,7 +23,7 @@ ENV PYTHONDONTWRITEBYTECODE=1 RUN pip install --no-cache-dir --upgrade \ pip \ celery[redis,pymemcache]${CELERY_VERSION:+==$CELERY_VERSION} \ - psutil + pytest-celery==1.0.0b4 # The workdir must be /app WORKDIR /app From d378cd98574c6641dd1cea6f26c9be0069ba735d Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 12 Mar 2024 16:32:53 +0200 Subject: [PATCH 0674/1051] Adjusted smoke tests CI time limit (#8907) --- .github/workflows/python-package.yml | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 8827da67018..ad7bd024373 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -158,7 +158,7 @@ jobs: - name: > Run tox for "${{ matrix.python-version }}-smoke" - timeout-minutes: 60 + timeout-minutes: 10 run: > tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k failover @@ -196,7 +196,7 @@ jobs: - name: > Run tox for "${{ matrix.python-version }}-smoke" - timeout-minutes: 60 + timeout-minutes: 15 run: > tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k stamping @@ -234,7 +234,7 @@ jobs: - name: > Run tox for "${{ matrix.python-version }}-smoke" - timeout-minutes: 60 + timeout-minutes: 5 run: > tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k test_canvas.py @@ -272,7 +272,7 @@ jobs: - name: > Run tox for "${{ matrix.python-version }}-smoke" - timeout-minutes: 60 + timeout-minutes: 10 run: > tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k 
test_consumer.py @@ -310,7 +310,7 @@ jobs: - name: > Run tox for "${{ matrix.python-version }}-smoke" - timeout-minutes: 60 + timeout-minutes: 5 run: > tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k test_control.py @@ -348,7 +348,7 @@ jobs: - name: > Run tox for "${{ matrix.python-version }}-smoke" - timeout-minutes: 60 + timeout-minutes: 5 run: > tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k test_signals.py @@ -386,7 +386,7 @@ jobs: - name: > Run tox for "${{ matrix.python-version }}-smoke" - timeout-minutes: 60 + timeout-minutes: 10 run: > tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k test_tasks.py @@ -424,7 +424,7 @@ jobs: - name: > Run tox for "${{ matrix.python-version }}-smoke" - timeout-minutes: 60 + timeout-minutes: 10 run: > tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k test_thread_safe.py @@ -462,7 +462,7 @@ jobs: - name: > Run tox for "${{ matrix.python-version }}-smoke" - timeout-minutes: 60 + timeout-minutes: 20 run: > tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k test_worker.py From 764a1639d77c994aaa6d25a7cef26ef756e1f926 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 12 Mar 2024 21:39:01 +0200 Subject: [PATCH 0675/1051] [pre-commit.ci] pre-commit autoupdate (#8908) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/mirrors-mypy: v1.8.0 → v1.9.0](https://github.com/pre-commit/mirrors-mypy/compare/v1.8.0...v1.9.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index ff2addbc262..6a51ec28c4e 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -30,7 +30,7 @@ repos: - id: isort - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.8.0 + rev: v1.9.0 hooks: - id: mypy pass_filenames: false From 743f33954c8d8930eda964f7d6d2b3639a3055ee Mon Sep 17 00:00:00 2001 From: "pyup.io bot" Date: Wed, 13 Mar 2024 09:32:53 -0700 Subject: [PATCH 0676/1051] Update pytest-rerunfailures from 13.0.0 to 14.0 (#8910) --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index ef400111a77..2302293b077 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,6 +1,6 @@ pytest==8.1.1 pytest-celery==1.0.0b4 -pytest-rerunfailures==13.0.0 +pytest-rerunfailures==14.0 pytest-subtests==0.12.1 pytest-timeout==2.3.1 pytest-click==1.1.0 From bb6ce11ba37a0be99c4207956c8ca705a4043fa2 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 14 Mar 2024 22:36:40 +0200 Subject: [PATCH 0677/1051] Use the "all" extra for pytest-celery (#8911) --- requirements/extras/pytest.txt | 2 +- requirements/test.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/extras/pytest.txt b/requirements/extras/pytest.txt index ae0cb71690e..dcc0f219deb 100644 --- a/requirements/extras/pytest.txt +++ b/requirements/extras/pytest.txt @@ -1 +1 @@ -pytest-celery==1.0.0b4 +pytest-celery[all]==1.0.0b4 diff --git a/requirements/test.txt b/requirements/test.txt index 2302293b077..4c697d496e3 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,5 +1,5 @@ pytest==8.1.1 -pytest-celery==1.0.0b4 
+pytest-celery[all]==1.0.0b4 pytest-rerunfailures==14.0 pytest-subtests==0.12.1 pytest-timeout==2.3.1 From bfbdcbaf60cd8c1653ebe5b58ac41526b5e1965a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Carlos=20Pe=C3=B1a?= Date: Mon, 18 Mar 2024 15:32:35 -0500 Subject: [PATCH 0678/1051] Fix typos and grammar (#8915) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Carlos Peña --- .github/ISSUE_TEMPLATE/Enhancement.md | 2 +- celery/backends/filesystem.py | 2 +- celery/backends/rpc.py | 2 +- celery/canvas.py | 4 ++-- celery/concurrency/asynpool.py | 2 +- t/unit/app/test_backends.py | 2 +- t/unit/backends/test_redis.py | 2 +- t/unit/conftest.py | 2 +- t/unit/contrib/test_worker.py | 2 +- 9 files changed, 10 insertions(+), 10 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/Enhancement.md b/.github/ISSUE_TEMPLATE/Enhancement.md index 3174256ac14..363f4630628 100644 --- a/.github/ISSUE_TEMPLATE/Enhancement.md +++ b/.github/ISSUE_TEMPLATE/Enhancement.md @@ -22,7 +22,7 @@ To check an item on the list replace [ ] with [x]. - [ ] I have checked the [pull requests list](https://github.com/celery/celery/pulls?q=is%3Apr+label%3A%22Issue+Type%3A+Enhancement%22+-label%3A%22Category%3A+Documentation%22) for existing proposed enhancements. - [ ] I have checked the [commit log](https://github.com/celery/celery/commits/main) - to find out if the if the same enhancement was already implemented in the + to find out if the same enhancement was already implemented in the main branch. - [ ] I have included all related issues and possible duplicate issues in this issue (If there are none, check this box anyway). diff --git a/celery/backends/filesystem.py b/celery/backends/filesystem.py index 22fd5dcfaad..1a624f3be62 100644 --- a/celery/backends/filesystem.py +++ b/celery/backends/filesystem.py @@ -50,7 +50,7 @@ def __init__(self, url=None, open=open, unlink=os.unlink, sep=os.sep, self.open = open self.unlink = unlink - # Lets verify that we've everything setup right + # Let's verify that we've everything setup right self._do_directory_test(b'.fs-backend-' + uuid().encode(encoding)) def __reduce__(self, args=(), kwargs=None): diff --git a/celery/backends/rpc.py b/celery/backends/rpc.py index 399c1dc7a20..927c7f517fa 100644 --- a/celery/backends/rpc.py +++ b/celery/backends/rpc.py @@ -222,7 +222,7 @@ def _to_result(self, task_id, state, result, traceback, request): def on_out_of_band_result(self, task_id, message): # Callback called when a reply for a task is received, - # but we have no idea what do do with it. + # but we have no idea what to do with it. # Since the result is not pending, we put it in a separate # buffer: probably it will become pending later. if self.result_consumer: diff --git a/celery/canvas.py b/celery/canvas.py index cb76a218013..cf322f3b8a1 100644 --- a/celery/canvas.py +++ b/celery/canvas.py @@ -396,7 +396,7 @@ def apply_async(self, args=None, kwargs=None, route_name=None, **options): else: args, kwargs, options = self.args, self.kwargs, self.options # pylint: disable=too-many-function-args - # Borks on this, as it's a property + # Works on this, as it's a property return _apply(args, kwargs, **options) def _merge(self, args=None, kwargs=None, options=None, force=False): @@ -515,7 +515,7 @@ def freeze(self, _id=None, group_id=None, chord=None, if group_index is not None: opts['group_index'] = group_index # pylint: disable=too-many-function-args - # Borks on this, as it's a property. + # Works on this, as it's a property. 
return self.AsyncResult(tid) _freeze = freeze diff --git a/celery/concurrency/asynpool.py b/celery/concurrency/asynpool.py index c024e685f8a..e1912b05b7a 100644 --- a/celery/concurrency/asynpool.py +++ b/celery/concurrency/asynpool.py @@ -194,7 +194,7 @@ def iterate_file_descriptors_safely(fds_iter, source_data, or possibly other reasons, so safely manage our lists of FDs. :param fds_iter: the file descriptors to iterate and apply hub_method :param source_data: data source to remove FD if it renders OSError - :param hub_method: the method to call with with each fd and kwargs + :param hub_method: the method to call with each fd and kwargs :*args to pass through to the hub_method; with a special syntax string '*fd*' represents a substitution for the current fd object in the iteration (for some callers). diff --git a/t/unit/app/test_backends.py b/t/unit/app/test_backends.py index df4e47af772..54b28456627 100644 --- a/t/unit/app/test_backends.py +++ b/t/unit/app/test_backends.py @@ -48,7 +48,7 @@ def embed_worker(app, Helper embedded worker for testing. It's based on a :func:`celery.contrib.testing.worker.start_worker`, - but doesn't modifies logging settings and additionally shutdown + but doesn't modify logging settings and additionally shutdown worker pool. """ # prepare application for worker diff --git a/t/unit/backends/test_redis.py b/t/unit/backends/test_redis.py index dbb11db8e3e..876d747dde3 100644 --- a/t/unit/backends/test_redis.py +++ b/t/unit/backends/test_redis.py @@ -1171,7 +1171,7 @@ def test_on_chord_part_return( self.b.client.lrange.assert_not_called() # Confirm that the `GroupResult.restore` mock was called complex_header_result.assert_called_once_with(request.group) - # Confirm the the callback was called with the `join()`ed group result + # Confirm that the callback was called with the `join()`ed group result if supports_native_join: expected_join = mock_result_obj.join_native else: diff --git a/t/unit/conftest.py b/t/unit/conftest.py index e742a5c3ccc..ce6fbc032ce 100644 --- a/t/unit/conftest.py +++ b/t/unit/conftest.py @@ -106,7 +106,7 @@ def reset_cache_backend_state(celery_app): @contextmanager def assert_signal_called(signal, **expected): - """Context that verifes signal is called before exiting.""" + """Context that verifies signal is called before exiting.""" handler = Mock() def on_call(**kwargs): diff --git a/t/unit/contrib/test_worker.py b/t/unit/contrib/test_worker.py index c729f644264..e3ec8f9a8bf 100644 --- a/t/unit/contrib/test_worker.py +++ b/t/unit/contrib/test_worker.py @@ -28,7 +28,7 @@ def error_task(): }) # to avoid changing the root logger level to ERROR, - # we have we have to set both app.log.loglevel start_worker arg to 0 + # we have to set both app.log.loglevel start_worker arg to 0 # (see celery.app.log.setup_logging_subsystem) self.app.log.loglevel = 0 From 47f89e70cc3a9cdea16ede6f925e510071842067 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 20 Mar 2024 19:24:50 +0200 Subject: [PATCH 0679/1051] Bump pytest-celery to 1.0.0rc1 (#8918) --- requirements/extras/pytest.txt | 2 +- requirements/test.txt | 2 +- t/smoke/workers/docker/dev | 2 +- t/smoke/workers/docker/pypi | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/extras/pytest.txt b/requirements/extras/pytest.txt index dcc0f219deb..20dfa5bfd10 100644 --- a/requirements/extras/pytest.txt +++ b/requirements/extras/pytest.txt @@ -1 +1 @@ -pytest-celery[all]==1.0.0b4 +pytest-celery[all]==1.0.0rc1 diff --git a/requirements/test.txt b/requirements/test.txt 
index 4c697d496e3..540809bebde 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,5 +1,5 @@ pytest==8.1.1 -pytest-celery[all]==1.0.0b4 +pytest-celery[all]==1.0.0rc1 pytest-rerunfailures==14.0 pytest-subtests==0.12.1 pytest-timeout==2.3.1 diff --git a/t/smoke/workers/docker/dev b/t/smoke/workers/docker/dev index a34370e8055..cc0129cb73d 100644 --- a/t/smoke/workers/docker/dev +++ b/t/smoke/workers/docker/dev @@ -24,7 +24,7 @@ COPY --chown=test_user:test_user . /celery RUN pip install --no-cache-dir --upgrade \ pip \ -e /celery[redis,pymemcache] \ - pytest-celery==1.0.0b4 + pytest-celery==1.0.0rc1 # The workdir must be /app WORKDIR /app diff --git a/t/smoke/workers/docker/pypi b/t/smoke/workers/docker/pypi index b11f95667d7..a7b76b2266c 100644 --- a/t/smoke/workers/docker/pypi +++ b/t/smoke/workers/docker/pypi @@ -23,7 +23,7 @@ ENV PYTHONDONTWRITEBYTECODE=1 RUN pip install --no-cache-dir --upgrade \ pip \ celery[redis,pymemcache]${CELERY_VERSION:+==$CELERY_VERSION} \ - pytest-celery==1.0.0b4 + pytest-celery==1.0.0rc1 # The workdir must be /app WORKDIR /app From d74222a83656bf42243177214419e3ea72e2d4dd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=81ukasz=20Leszczuk?= <146740242+lukasz-leszczuk-airspace-intelligence@users.noreply.github.com> Date: Thu, 21 Mar 2024 15:32:16 +0100 Subject: [PATCH 0680/1051] Print safe_say() to stdout for non-error flows (#8919) --- celery/apps/worker.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/celery/apps/worker.py b/celery/apps/worker.py index dcc04dac25b..1556531e523 100644 --- a/celery/apps/worker.py +++ b/celery/apps/worker.py @@ -77,8 +77,8 @@ def active_thread_count(): if not t.name.startswith('Dummy-')) -def safe_say(msg): - print(f'\n{msg}', file=sys.__stderr__, flush=True) +def safe_say(msg, f=sys.__stderr__): + print(f'\n{msg}', file=f, flush=True) class Worker(WorkController): @@ -286,7 +286,7 @@ def _handle_request(*args): if current_process()._name == 'MainProcess': if callback: callback(worker) - safe_say(f'worker: {how} shutdown (MainProcess)') + safe_say(f'worker: {how} shutdown (MainProcess)', sys.__stdout__) signals.worker_shutting_down.send( sender=worker.hostname, sig=sig, how=how, exitcode=exitcode, @@ -317,7 +317,8 @@ def _handle_request(*args): def on_SIGINT(worker): - safe_say('worker: Hitting Ctrl+C again will terminate all running tasks!') + safe_say('worker: Hitting Ctrl+C again will terminate all running tasks!', + sys.__stdout__) install_worker_term_hard_handler(worker, sig='SIGINT') @@ -343,7 +344,8 @@ def install_worker_restart_handler(worker, sig='SIGHUP'): def restart_worker_sig_handler(*args): """Signal handler restarting the current python program.""" set_in_sighandler(True) - safe_say(f"Restarting celery worker ({' '.join(sys.argv)})") + safe_say(f"Restarting celery worker ({' '.join(sys.argv)})", + sys.__stdout__) import atexit atexit.register(_reload_current_worker) from celery.worker import state From 3265d484d4af75bf088c762bef93ee4e34f76ae0 Mon Sep 17 00:00:00 2001 From: "pyup.io bot" Date: Mon, 25 Mar 2024 04:33:34 -0700 Subject: [PATCH 0681/1051] Update pytest-cov from 4.1.0 to 5.0.0 (#8924) --- requirements/test-ci-base.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test-ci-base.txt b/requirements/test-ci-base.txt index 626cbbaf90c..6238dd48914 100644 --- a/requirements/test-ci-base.txt +++ b/requirements/test-ci-base.txt @@ -1,4 +1,4 @@ -pytest-cov==4.1.0 +pytest-cov==5.0.0 pytest-github-actions-annotate-failures==0.2.0 -r 
extras/redis.txt -r extras/sqlalchemy.txt From 46992a11afe95e4149038406f456678fca6979d2 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 25 Mar 2024 19:35:29 +0200 Subject: [PATCH 0682/1051] [pre-commit.ci] pre-commit autoupdate (#8926) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/asottile/pyupgrade: v3.15.1 → v3.15.2](https://github.com/asottile/pyupgrade/compare/v3.15.1...v3.15.2) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 6a51ec28c4e..1b96df15b33 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/asottile/pyupgrade - rev: v3.15.1 + rev: v3.15.2 hooks: - id: pyupgrade args: ["--py38-plus"] From b14c976b0335020dcd37a60a3ca41a00ef636b91 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 27 Mar 2024 16:48:16 +0200 Subject: [PATCH 0683/1051] Bump pytest-celery to 1.0.0rc2 (#8928) --- requirements/extras/pytest.txt | 2 +- requirements/test.txt | 2 +- t/smoke/workers/docker/dev | 2 +- t/smoke/workers/docker/pypi | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/extras/pytest.txt b/requirements/extras/pytest.txt index 20dfa5bfd10..e3ec5e49756 100644 --- a/requirements/extras/pytest.txt +++ b/requirements/extras/pytest.txt @@ -1 +1 @@ -pytest-celery[all]==1.0.0rc1 +pytest-celery[all]==1.0.0rc2 diff --git a/requirements/test.txt b/requirements/test.txt index 540809bebde..98e8b289e66 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,5 +1,5 @@ pytest==8.1.1 -pytest-celery[all]==1.0.0rc1 +pytest-celery[all]==1.0.0rc2 pytest-rerunfailures==14.0 pytest-subtests==0.12.1 pytest-timeout==2.3.1 diff --git a/t/smoke/workers/docker/dev b/t/smoke/workers/docker/dev index cc0129cb73d..9060eeabd71 100644 --- a/t/smoke/workers/docker/dev +++ b/t/smoke/workers/docker/dev @@ -24,7 +24,7 @@ COPY --chown=test_user:test_user . /celery RUN pip install --no-cache-dir --upgrade \ pip \ -e /celery[redis,pymemcache] \ - pytest-celery==1.0.0rc1 + pytest-celery==1.0.0rc2 # The workdir must be /app WORKDIR /app diff --git a/t/smoke/workers/docker/pypi b/t/smoke/workers/docker/pypi index a7b76b2266c..7c29f8d2553 100644 --- a/t/smoke/workers/docker/pypi +++ b/t/smoke/workers/docker/pypi @@ -23,7 +23,7 @@ ENV PYTHONDONTWRITEBYTECODE=1 RUN pip install --no-cache-dir --upgrade \ pip \ celery[redis,pymemcache]${CELERY_VERSION:+==$CELERY_VERSION} \ - pytest-celery==1.0.0rc1 + pytest-celery==1.0.0rc2 # The workdir must be /app WORKDIR /app From af1d3210b6b9bfab5385662a8515d740874bdb2c Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 27 Mar 2024 17:12:07 +0200 Subject: [PATCH 0684/1051] Added changelog for v5.4.0rc2 (#8932) --- Changelog.rst | 57 +++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 57 insertions(+) diff --git a/Changelog.rst b/Changelog.rst index 35a0fff71b4..d076ba2244e 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -8,6 +8,63 @@ This document contains change notes for bugfix & new features in the main branch & 5.3.x series, please see :ref:`whatsnew-5.3` for an overview of what's new in Celery 5.3. +.. 
_version-5.4.0rc2: + +5.4.0rc2 +======== + +:release-date: 2024-03-27 +:release-by: Tomer Nosrati + +- feat(daemon): allows daemonization options to be fetched from app settings (#8553) +- Fixed version documentation tag from #8553 in configuration.rst (#8802) +- Upgraded Sphinx from v5.3.0 to v7.x.x (#8803) +- Update elasticsearch requirement from <=8.11.1 to <=8.12.0 (#8810) +- Update elastic-transport requirement from <=8.11.0 to <=8.12.0 (#8811) +- Update cryptography to 42.0.0 (#8814) +- Catch UnicodeDecodeError when opening corrupt beat-schedule.db (#8806) +- Update cryptography to 42.0.1 (#8817) +- Limit moto to <5.0.0 until the breaking issues are fixed (#8820) +- Enable efficient `chord` when using dynamicdb as backend store (#8783) +- Add a Task class specialised for Django (#8491) +- Sync kombu versions in requirements and setup.cfg (#8825) +- chore(ci): Enhance CI with `workflow_dispatch` for targeted debugging and testing (#8826) +- Update cryptography to 42.0.2 (#8827) +- Docfix: pip install celery[sqs] -> pip install "celery[sqs]" (#8829) +- Bump pre-commit/action from 3.0.0 to 3.0.1 (#8835) +- Support moto 5.0 (#8838) +- Another fix for `link_error` signatures being `dict`s instead of `Signature`s (#8841) +- Bump codecov/codecov-action from 3 to 4 (#8831) +- Upgrade from pytest-celery v1.0.0b1 -> v1.0.0b2 (#8843) +- Bump pytest from 7.4.4 to 8.0.0 (#8823) +- Update pre-commit to 3.6.1 (#8839) +- Update cryptography to 42.0.3 (#8854) +- Bump pytest from 8.0.0 to 8.0.1 (#8855) +- Update cryptography to 42.0.4 (#8864) +- Update pytest to 8.0.2 (#8870) +- Update cryptography to 42.0.5 (#8869) +- Update elasticsearch requirement from <=8.12.0 to <=8.12.1 (#8867) +- Eliminate consecutive chords generated by group | task upgrade (#8663) +- Make custom remote control commands available in CLI (#8489) +- Add Google Cloud Storage (GCS) backend (#8868) +- Bump msgpack from 1.0.7 to 1.0.8 (#8885) +- Update pytest to 8.1.0 (#8886) +- Bump pytest-timeout from 2.2.0 to 2.3.1 (#8894) +- Bump pytest-subtests from 0.11.0 to 0.12.1 (#8896) +- Bump mypy from 1.8.0 to 1.9.0 (#8898) +- Update pytest to 8.1.1 (#8901) +- Update contributing guide to use ssh upstream url (#8881) +- Fix recursive result parents on group in middle of chain (#8903) +- Bump pytest-celery to 1.0.0b4 (#8899) +- Adjusted smoke tests CI time limit (#8907) +- Update pytest-rerunfailures to 14.0 (#8910) +- Use the "all" extra for pytest-celery (#8911) +- Fix typos and grammar (#8915) +- Bump pytest-celery to 1.0.0rc1 (#8918) +- Print safe_say() to stdout for non-error flows (#8919) +- Update pytest-cov to 5.0.0 (#8924) +- Bump pytest-celery to 1.0.0rc2 (#8928) + .. _version-5.4.0rc1: 5.4.0rc1 From ab19e712bff5a0e0bd36c41e45eeebe222eb3e8d Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 27 Mar 2024 17:12:52 +0200 Subject: [PATCH 0685/1051] =?UTF-8?q?Bump=20version:=205.4.0rc1=20?= =?UTF-8?q?=E2=86=92=205.4.0rc2?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.cfg | 2 +- README.rst | 2 +- celery/__init__.py | 2 +- docs/includes/introduction.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index f82cfbd7d53..dccdb437f6b 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 5.4.0rc1 +current_version = 5.4.0rc2 commit = True tag = True parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P[a-z\d]+)? 
diff --git a/README.rst b/README.rst index 28a5dbcc3e4..6a8c12f5930 100644 --- a/README.rst +++ b/README.rst @@ -2,7 +2,7 @@ |build-status| |coverage| |license| |wheel| |semgrep| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge| -:Version: 5.4.0rc1 (opalescent) +:Version: 5.4.0rc2 (opalescent) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ diff --git a/celery/__init__.py b/celery/__init__.py index 7212e277efc..9894bc7e322 100644 --- a/celery/__init__.py +++ b/celery/__init__.py @@ -17,7 +17,7 @@ SERIES = 'opalescent' -__version__ = '5.4.0rc1' +__version__ = '5.4.0rc2' __author__ = 'Ask Solem' __contact__ = 'auvipy@gmail.com' __homepage__ = 'https://docs.celeryq.dev/' diff --git a/docs/includes/introduction.txt b/docs/includes/introduction.txt index e3df2ded029..6de1f1f9ea0 100644 --- a/docs/includes/introduction.txt +++ b/docs/includes/introduction.txt @@ -1,4 +1,4 @@ -:Version: 5.4.0rc1 (opalescent) +:Version: 5.4.0rc2 (opalescent) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ From 392d534d892f4f58f767c62b6df4449567765db8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 28 Mar 2024 10:06:37 +0200 Subject: [PATCH 0686/1051] Update elastic-transport requirement from <=8.12.0 to <=8.13.0 (#8933) Updates the requirements on [elastic-transport](https://github.com/elastic/elastic-transport-python) to permit the latest version. - [Release notes](https://github.com/elastic/elastic-transport-python/releases) - [Changelog](https://github.com/elastic/elastic-transport-python/blob/main/CHANGELOG.md) - [Commits](https://github.com/elastic/elastic-transport-python/compare/0.1.0b0...v8.13.0) --- updated-dependencies: - dependency-name: elastic-transport dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/extras/elasticsearch.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/elasticsearch.txt b/requirements/extras/elasticsearch.txt index 3a5f5003b57..ba84c72db2b 100644 --- a/requirements/extras/elasticsearch.txt +++ b/requirements/extras/elasticsearch.txt @@ -1,2 +1,2 @@ elasticsearch<=8.12.1 -elastic-transport<=8.12.0 +elastic-transport<=8.13.0 From c3a988c1e532557fd383f383ea3fa3466e5d85ca Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 28 Mar 2024 13:21:02 +0200 Subject: [PATCH 0687/1051] Update elasticsearch requirement from <=8.12.1 to <=8.13.0 (#8934) Updates the requirements on [elasticsearch](https://github.com/elastic/elasticsearch-py) to permit the latest version. - [Release notes](https://github.com/elastic/elasticsearch-py/releases) - [Commits](https://github.com/elastic/elasticsearch-py/compare/0.4.1...v8.13.0) --- updated-dependencies: - dependency-name: elasticsearch dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/extras/elasticsearch.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/elasticsearch.txt b/requirements/extras/elasticsearch.txt index ba84c72db2b..d32c9d2108e 100644 --- a/requirements/extras/elasticsearch.txt +++ b/requirements/extras/elasticsearch.txt @@ -1,2 +1,2 @@ -elasticsearch<=8.12.1 +elasticsearch<=8.13.0 elastic-transport<=8.13.0 From 010af00acd02200ee4fd0acaa7a354cbf0b75e55 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 1 Apr 2024 16:56:29 +0300 Subject: [PATCH 0688/1051] Hotfix: Smoke tests didn't allow customizing the worker's command arguments, now it does (#8937) --- t/smoke/workers/dev.py | 1 + t/smoke/workers/docker/dev | 15 ++++++++++++++- t/smoke/workers/docker/pypi | 15 ++++++++++++++- 3 files changed, 29 insertions(+), 2 deletions(-) diff --git a/t/smoke/workers/dev.py b/t/smoke/workers/dev.py index 13901729240..edd27325d5e 100644 --- a/t/smoke/workers/dev.py +++ b/t/smoke/workers/dev.py @@ -53,6 +53,7 @@ def worker_queue(cls) -> str: }, wrapper_class=SmokeWorkerContainer, timeout=defaults.DEFAULT_WORKER_CONTAINER_TIMEOUT, + command=fxtr("default_worker_command"), ) diff --git a/t/smoke/workers/docker/dev b/t/smoke/workers/docker/dev index 9060eeabd71..3bc9d12dfb8 100644 --- a/t/smoke/workers/docker/dev +++ b/t/smoke/workers/docker/dev @@ -4,7 +4,20 @@ FROM python:3.11-bookworm RUN adduser --disabled-password --gecos "" test_user # Install system dependencies -RUN apt-get update && apt-get install -y build-essential +RUN apt-get update && apt-get install -y build-essential \ + git \ + wget \ + make \ + curl \ + apt-utils \ + debconf \ + lsb-release \ + libmemcached-dev \ + libffi-dev \ + ca-certificates \ + pypy3 \ + pypy3-lib \ + sudo # Set arguments ARG CELERY_LOG_LEVEL=INFO diff --git a/t/smoke/workers/docker/pypi b/t/smoke/workers/docker/pypi index 7c29f8d2553..05f58da7728 100644 --- a/t/smoke/workers/docker/pypi +++ b/t/smoke/workers/docker/pypi @@ -4,7 +4,20 @@ FROM python:3.10-bookworm RUN adduser --disabled-password --gecos "" test_user # Install system dependencies -RUN apt-get update && apt-get install -y build-essential +RUN apt-get update && apt-get install -y build-essential \ + git \ + wget \ + make \ + curl \ + apt-utils \ + debconf \ + lsb-release \ + libmemcached-dev \ + libffi-dev \ + ca-certificates \ + pypy3 \ + pypy3-lib \ + sudo # Set arguments ARG CELERY_VERSION="" From 6e50deb49a866c5b30d40f620e566711bf767f37 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Fri, 5 Apr 2024 20:25:47 +0300 Subject: [PATCH 0689/1051] Bump pytest-celery to 1.0.0rc3 (#8946) --- requirements/extras/pytest.txt | 2 +- requirements/test.txt | 2 +- t/smoke/workers/dev.py | 1 + t/smoke/workers/docker/dev | 4 +++- t/smoke/workers/docker/pypi | 4 +++- 5 files changed, 9 insertions(+), 4 deletions(-) diff --git a/requirements/extras/pytest.txt b/requirements/extras/pytest.txt index e3ec5e49756..c5b2013cea1 100644 --- a/requirements/extras/pytest.txt +++ b/requirements/extras/pytest.txt @@ -1 +1 @@ -pytest-celery[all]==1.0.0rc2 +pytest-celery[all]==1.0.0rc3 diff --git a/requirements/test.txt b/requirements/test.txt index 98e8b289e66..1709f456df1 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,5 +1,5 @@ pytest==8.1.1 -pytest-celery[all]==1.0.0rc2 +pytest-celery[all]==1.0.0rc3 pytest-rerunfailures==14.0 pytest-subtests==0.12.1 pytest-timeout==2.3.1 diff --git 
a/t/smoke/workers/dev.py b/t/smoke/workers/dev.py index edd27325d5e..6a690adf55b 100644 --- a/t/smoke/workers/dev.py +++ b/t/smoke/workers/dev.py @@ -40,6 +40,7 @@ def worker_queue(cls) -> str: default_worker_container = container( image="{celery_dev_worker_image.id}", + ports=fxtr("default_worker_ports"), environment=fxtr("default_worker_env"), network="{default_pytest_celery_network.name}", volumes={ diff --git a/t/smoke/workers/docker/dev b/t/smoke/workers/docker/dev index 3bc9d12dfb8..3bd71ec7b8f 100644 --- a/t/smoke/workers/docker/dev +++ b/t/smoke/workers/docker/dev @@ -30,6 +30,8 @@ ENV WORKER_QUEUE=$CELERY_WORKER_QUEUE ENV PYTHONUNBUFFERED=1 ENV PYTHONDONTWRITEBYTECODE=1 +EXPOSE 5678 + # Install celery from source WORKDIR /celery @@ -37,7 +39,7 @@ COPY --chown=test_user:test_user . /celery RUN pip install --no-cache-dir --upgrade \ pip \ -e /celery[redis,pymemcache] \ - pytest-celery==1.0.0rc2 + pytest-celery==1.0.0rc3 # The workdir must be /app WORKDIR /app diff --git a/t/smoke/workers/docker/pypi b/t/smoke/workers/docker/pypi index 05f58da7728..1b2e533403a 100644 --- a/t/smoke/workers/docker/pypi +++ b/t/smoke/workers/docker/pypi @@ -32,11 +32,13 @@ ENV WORKER_QUEUE=$CELERY_WORKER_QUEUE ENV PYTHONUNBUFFERED=1 ENV PYTHONDONTWRITEBYTECODE=1 +EXPOSE 5678 + # Install Python dependencies RUN pip install --no-cache-dir --upgrade \ pip \ celery[redis,pymemcache]${CELERY_VERSION:+==$CELERY_VERSION} \ - pytest-celery==1.0.0rc2 + pytest-celery==1.0.0rc3 # The workdir must be /app WORKDIR /app From 481cf8e19ca2b5d34eb74a6954974e9dc92ebda3 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 8 Apr 2024 22:53:50 +0300 Subject: [PATCH 0690/1051] [pre-commit.ci] pre-commit autoupdate (#8950) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/pre-commit-hooks: v4.5.0 → v4.6.0](https://github.com/pre-commit/pre-commit-hooks/compare/v4.5.0...v4.6.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 1b96df15b33..2e6ce34bbc7 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -17,7 +17,7 @@ repos: exclude: ^celery/app/task\.py$|^celery/backends/cache\.py$ - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.5.0 + rev: v4.6.0 hooks: - id: check-merge-conflict - id: check-toml From afeeff8754cff13103526de0aec60800b8603047 Mon Sep 17 00:00:00 2001 From: Alex McLarty Date: Wed, 10 Apr 2024 11:05:15 +0100 Subject: [PATCH 0691/1051] Update optimizing.rst (#8945) Remove dead link to Jon Bentley's book Programming Pearls. Tried to find a free resource, but none appear to exist. --- docs/userguide/optimizing.rst | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/docs/userguide/optimizing.rst b/docs/userguide/optimizing.rst index 4372f3af199..346f7374794 100644 --- a/docs/userguide/optimizing.rst +++ b/docs/userguide/optimizing.rst @@ -18,7 +18,7 @@ responsiveness at times of high load. Ensuring Operations =================== -In the book `Programming Pearls`_, Jon Bentley presents the concept of +In the book Programming Pearls, Jon Bentley presents the concept of back-of-the-envelope calculations by asking the question; ❝ How much water flows out of the Mississippi River in a day? 
❞ @@ -38,8 +38,6 @@ You should set up alerts, that'll notify you as soon as any queue has reached an unacceptable size. This way you can take appropriate action like adding new worker nodes, or revoking unnecessary tasks. -.. _`Programming Pearls`: http://www.cs.bell-labs.com/cm/cs/pearls/ - .. _`The back of the envelope`: http://books.google.com/books?id=kse_7qbWbjsC&pg=PA67 From 2acc150ef8a617fc108ee7a461cce2a2a357a98b Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 11 Apr 2024 13:03:32 +0300 Subject: [PATCH 0692/1051] Doc: Enhance "Testing with Celery" section (#8955) --- docs/userguide/testing.rst | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/docs/userguide/testing.rst b/docs/userguide/testing.rst index 4c83e350ffc..5b2a5761818 100644 --- a/docs/userguide/testing.rst +++ b/docs/userguide/testing.rst @@ -4,6 +4,23 @@ Testing with Celery ================================================================ +Testing with Celery is divided into two parts: + + * Unit & Integration: Using ``celery.contrib.pytest``. + * Smoke / Production: Using :pypi:`pytest-celery ` >= 1.0.0 + +Installing the pytest-celery plugin will install the ``celery.contrib.pytest`` infrastructure as well, +alongside the pytest plugin infrastructure. The difference is how you use it. + +.. warning:: + + Both APIs are NOT compatible with each other. The pytest-celery plugin is Docker based + and the ``celery.contrib.pytest`` is mock based. + +To use the ``celery.contrib.pytest`` infrastructure, follow the instructions below. + +The pytest-celery plugin has its `own documentation `_. + Tasks and unit tests ==================== From 8ad421ce7a135d0521b88484d50e40f329556efb Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Fri, 12 Apr 2024 04:18:49 +0300 Subject: [PATCH 0693/1051] Bump pytest-celery to v1.0.0 (#8962) --- requirements/extras/pytest.txt | 2 +- requirements/test.txt | 2 +- t/smoke/workers/docker/dev | 2 +- t/smoke/workers/docker/pypi | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/extras/pytest.txt b/requirements/extras/pytest.txt index c5b2013cea1..d559eb3eb16 100644 --- a/requirements/extras/pytest.txt +++ b/requirements/extras/pytest.txt @@ -1 +1 @@ -pytest-celery[all]==1.0.0rc3 +pytest-celery[all]>=1.0.0 diff --git a/requirements/test.txt b/requirements/test.txt index 1709f456df1..c39c5eedf20 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,5 +1,5 @@ pytest==8.1.1 -pytest-celery[all]==1.0.0rc3 +pytest-celery[all]>=1.0.0 pytest-rerunfailures==14.0 pytest-subtests==0.12.1 pytest-timeout==2.3.1 diff --git a/t/smoke/workers/docker/dev b/t/smoke/workers/docker/dev index 3bd71ec7b8f..82427c19573 100644 --- a/t/smoke/workers/docker/dev +++ b/t/smoke/workers/docker/dev @@ -39,7 +39,7 @@ COPY --chown=test_user:test_user . 
/celery RUN pip install --no-cache-dir --upgrade \ pip \ -e /celery[redis,pymemcache] \ - pytest-celery==1.0.0rc3 + pytest-celery>=1.0.0 # The workdir must be /app WORKDIR /app diff --git a/t/smoke/workers/docker/pypi b/t/smoke/workers/docker/pypi index 1b2e533403a..699f290e119 100644 --- a/t/smoke/workers/docker/pypi +++ b/t/smoke/workers/docker/pypi @@ -38,7 +38,7 @@ EXPOSE 5678 RUN pip install --no-cache-dir --upgrade \ pip \ celery[redis,pymemcache]${CELERY_VERSION:+==$CELERY_VERSION} \ - pytest-celery==1.0.0rc3 + pytest-celery>=1.0.0 # The workdir must be /app WORKDIR /app From 3e018cb7e7f7f1bbf9f0559bcb9e055f355ea0ca Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 14 Apr 2024 23:06:16 +0300 Subject: [PATCH 0694/1051] Bump pytest-order from 1.2.0 to 1.2.1 (#8941) Bumps [pytest-order](https://github.com/pytest-dev/pytest-order) from 1.2.0 to 1.2.1. - [Release notes](https://github.com/pytest-dev/pytest-order/releases) - [Changelog](https://github.com/pytest-dev/pytest-order/blob/main/CHANGELOG.md) - [Commits](https://github.com/pytest-dev/pytest-order/compare/v1.2.0...v1.2.1) --- updated-dependencies: - dependency-name: pytest-order dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index c39c5eedf20..ddff18a7c33 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -4,7 +4,7 @@ pytest-rerunfailures==14.0 pytest-subtests==0.12.1 pytest-timeout==2.3.1 pytest-click==1.1.0 -pytest-order==1.2.0 +pytest-order==1.2.1 boto3>=1.26.143 moto>=4.1.11,<5.1.0 # typing extensions From 54df3e9a686149430f796083f311e9972c253668 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 17 Apr 2024 16:52:39 +0300 Subject: [PATCH 0695/1051] Added documentation to the smoke tests infra (#8970) --- t/smoke/conftest.py | 21 +++++++++++++++++++-- t/smoke/operations/task_termination.py | 10 ++++++++++ t/smoke/operations/worker_kill.py | 10 +++++++++- t/smoke/operations/worker_restart.py | 10 +++++++++- t/smoke/signals.py | 12 +++++++----- t/smoke/tasks.py | 2 ++ t/smoke/workers/alt.py | 6 ++++++ t/smoke/workers/dev.py | 17 +++++++++++++++++ t/smoke/workers/latest.py | 9 +++++++++ t/smoke/workers/other.py | 6 ++++++ 10 files changed, 94 insertions(+), 9 deletions(-) diff --git a/t/smoke/conftest.py b/t/smoke/conftest.py index 4a00ff63fb4..c7f856fef3a 100644 --- a/t/smoke/conftest.py +++ b/t/smoke/conftest.py @@ -18,11 +18,20 @@ class SuiteOperations( WorkerKill, WorkerRestart, ): - pass + """Optional operations that can be performed with different methods, + shared across the smoke tests suite. 
+
+    Example Usage:
+    >>> class test_mysuite(SuiteOperations):
+    >>>     def test_something(self):
+    >>>         self.prepare_worker_with_conditions()
+    >>>         assert conditions are met
+    """


 @pytest.fixture
 def default_worker_tasks(default_worker_tasks: set) -> set:
+    """Use all of the integration and smoke suites tasks in the smoke tests workers."""
     from t.integration import tasks as integration_tests_tasks
     from t.smoke import tasks as smoke_tests_tasks
@@ -31,6 +40,10 @@ def default_worker_tasks(default_worker_tasks: set) -> set:
     return default_worker_tasks


+# When using integration test tasks that require a Redis instance,
+# we use pytest-celery to start a dedicated Redis container for the smoke tests
+# suite, configured to be used by the integration test tasks.
+
 redis_image = fetch(repository=REDIS_IMAGE)
 redis_test_container_network = network(scope="session")
 redis_test_container: RedisContainer = container(
@@ -44,6 +57,10 @@ def default_worker_tasks(default_worker_tasks: set) -> set:
 )


-@pytest.fixture(scope="session", autouse=True)
+@pytest.fixture(
+    scope="session",
+    autouse=True,  # Ensure the configuration is applied automatically
+)
 def set_redis_test_container(redis_test_container: RedisContainer):
+    """Configure the Redis test container to be used by the integration tests tasks."""
     os.environ["REDIS_PORT"] = str(redis_test_container.port)

diff --git a/t/smoke/operations/task_termination.py b/t/smoke/operations/task_termination.py
index 98d2c5fc2e6..49acf518df8 100644
--- a/t/smoke/operations/task_termination.py
+++ b/t/smoke/operations/task_termination.py
@@ -11,6 +11,7 @@


 class TaskTermination:
+    """Terminates a task in different ways."""
     class Method(Enum):
         SIGKILL = auto()
         SYSTEM_EXIT = auto()
@@ -22,6 +23,15 @@ def apply_self_termination_task(
         worker: CeleryTestWorker,
         method: TaskTermination.Method,
     ) -> AsyncResult:
+        """Apply a task that will terminate itself.
+
+        Args:
+            worker (CeleryTestWorker): The worker whose queue the task is sent to.
+            method (TaskTermination.Method): The method used to terminate the task.
+
+        Returns:
+            AsyncResult: The result of applying the task.
+        """
         try:
             self_termination_sig: Signature = {
                 TaskTermination.Method.SIGKILL: self_termination_sigkill.si(),

diff --git a/t/smoke/operations/worker_kill.py b/t/smoke/operations/worker_kill.py
index 6a4af26b383..7c4b2583e3f 100644
--- a/t/smoke/operations/worker_kill.py
+++ b/t/smoke/operations/worker_kill.py
@@ -8,6 +8,7 @@


 class WorkerKill:
+    """Kills a worker in different ways."""
     class Method(Enum):
         DOCKER_KILL = auto()
         CONTROL_SHUTDOWN = auto()
@@ -17,7 +18,14 @@ def kill_worker(
         worker: CeleryTestWorker,
         method: WorkerKill.Method,
         assertion: bool = True,
-    ):
+    ) -> None:
+        """Kill a Celery worker.
+
+        Args:
+            worker (CeleryTestWorker): Worker to kill.
+            method (WorkerKill.Method): The method to kill the worker.
+            assertion (bool, optional): Whether to assert the worker state after the kill. Defaults to True.
+        """
         if method == WorkerKill.Method.DOCKER_KILL:
             worker.kill()

diff --git a/t/smoke/operations/worker_restart.py b/t/smoke/operations/worker_restart.py
index 58d87c9def0..b443bd1f0b2 100644
--- a/t/smoke/operations/worker_restart.py
+++ b/t/smoke/operations/worker_restart.py
@@ -6,6 +6,7 @@


 class WorkerRestart:
+    """Restarts a worker in different ways."""
     class Method(Enum):
         POOL_RESTART = auto()
         DOCKER_RESTART_GRACEFULLY = auto()
@@ -16,7 +17,14 @@ def restart_worker(
         worker: CeleryTestWorker,
         method: WorkerRestart.Method,
         assertion: bool = True,
-    ):
+    ) -> None:
+        """Restart a Celery worker.
+ + Args: + worker (CeleryTestWorker): Worker to restart. + method (WorkerRestart.Method): The method to restart the worker. + assertion (bool, optional): Whether to assert the worker state after restart. Defaults to True. + """ if method == WorkerRestart.Method.POOL_RESTART: worker.app.control.pool_restart() worker.container.reload() diff --git a/t/smoke/signals.py b/t/smoke/signals.py index 298c12e17d3..a43ee2288d0 100644 --- a/t/smoke/signals.py +++ b/t/smoke/signals.py @@ -1,26 +1,28 @@ +"""Signal Handlers for the smoke test.""" + from celery.signals import worker_init, worker_process_init, worker_process_shutdown, worker_ready, worker_shutdown @worker_init.connect -def worker_init_handler(sender, **kwargs): # type: ignore +def worker_init_handler(sender, **kwargs): print("worker_init_handler") @worker_process_init.connect -def worker_process_init_handler(sender, **kwargs): # type: ignore +def worker_process_init_handler(sender, **kwargs): print("worker_process_init_handler") @worker_process_shutdown.connect -def worker_process_shutdown_handler(sender, pid, exitcode, **kwargs): # type: ignore +def worker_process_shutdown_handler(sender, pid, exitcode, **kwargs): print("worker_process_shutdown_handler") @worker_ready.connect -def worker_ready_handler(sender, **kwargs): # type: ignore +def worker_ready_handler(sender, **kwargs): print("worker_ready_handler") @worker_shutdown.connect -def worker_shutdown_handler(sender, **kwargs): # type: ignore +def worker_shutdown_handler(sender, **kwargs): print("worker_shutdown_handler") diff --git a/t/smoke/tasks.py b/t/smoke/tasks.py index fcaffb2779a..6314dd11865 100644 --- a/t/smoke/tasks.py +++ b/t/smoke/tasks.py @@ -1,3 +1,5 @@ +"""Smoke tests tasks.""" + from __future__ import annotations import os diff --git a/t/smoke/workers/alt.py b/t/smoke/workers/alt.py index 63dbd673d67..a79778e1041 100644 --- a/t/smoke/workers/alt.py +++ b/t/smoke/workers/alt.py @@ -11,11 +11,14 @@ class AltSmokeWorkerContainer(SmokeWorkerContainer): + """Alternative worker with different name, but same configurations.""" + @classmethod def worker_name(cls) -> str: return "alt_smoke_tests_worker" +# Build the image like the dev worker celery_alt_dev_worker_image = build( path=".", dockerfile="t/smoke/workers/docker/dev", @@ -24,6 +27,7 @@ def worker_name(cls) -> str: ) +# Define container settings like the dev worker alt_dev_worker_container = container( image="{celery_alt_dev_worker_image.id}", environment=fxtr("default_worker_env"), @@ -39,6 +43,7 @@ def worker_name(cls) -> str: }, wrapper_class=AltSmokeWorkerContainer, timeout=defaults.DEFAULT_WORKER_CONTAINER_TIMEOUT, + command=AltSmokeWorkerContainer.command(), ) @@ -47,6 +52,7 @@ def celery_alt_dev_worker( alt_dev_worker_container: AltSmokeWorkerContainer, celery_setup_app: Celery, ) -> CeleryTestWorker: + """Creates a pytest-celery worker node from the worker container.""" worker = CeleryTestWorker(alt_dev_worker_container, app=celery_setup_app) yield worker worker.teardown() diff --git a/t/smoke/workers/dev.py b/t/smoke/workers/dev.py index 6a690adf55b..70bd4a41e98 100644 --- a/t/smoke/workers/dev.py +++ b/t/smoke/workers/dev.py @@ -9,6 +9,11 @@ class SmokeWorkerContainer(CeleryWorkerContainer): + """Defines the configurations for the smoke tests worker container. + + This worker will install Celery from the current source code. 
+ """ + @property def client(self) -> Any: return self @@ -30,6 +35,7 @@ def worker_queue(cls) -> str: return "smoke_tests_queue" +# Build the image from the current source code celery_dev_worker_image = build( path=".", dockerfile="t/smoke/workers/docker/dev", @@ -38,6 +44,7 @@ def worker_queue(cls) -> str: ) +# Define container settings default_worker_container = container( image="{celery_dev_worker_image.id}", ports=fxtr("default_worker_ports"), @@ -60,9 +67,19 @@ def worker_queue(cls) -> str: @pytest.fixture def default_worker_container_cls() -> Type[CeleryWorkerContainer]: + """Replace the default pytest-celery worker container with the smoke tests worker container. + + This will allow the default fixtures of pytest-celery to use the custom worker + configuration using the vendor class. + """ return SmokeWorkerContainer @pytest.fixture(scope="session") def default_worker_container_session_cls() -> Type[CeleryWorkerContainer]: + """Replace the default pytest-celery worker container with the smoke tests worker container. + + This will allow the default fixtures of pytest-celery to use the custom worker + configuration using the vendor class. + """ return SmokeWorkerContainer diff --git a/t/smoke/workers/latest.py b/t/smoke/workers/latest.py index c922e98e6ef..b53f3ad502f 100644 --- a/t/smoke/workers/latest.py +++ b/t/smoke/workers/latest.py @@ -8,6 +8,11 @@ class CeleryLatestWorkerContainer(CeleryWorkerContainer): + """Defines the configurations for a Celery worker container. + + This worker will install the latest version of Celery from PyPI. + """ + @property def client(self) -> Any: return self @@ -25,6 +30,7 @@ def worker_queue(cls) -> str: return "celery_latest_tests_queue" +# Build the image from the PyPI Dockerfile celery_latest_worker_image = build( path=".", dockerfile="t/smoke/workers/docker/pypi", @@ -33,6 +39,7 @@ def worker_queue(cls) -> str: ) +# Define container settings celery_latest_worker_container = container( image="{celery_latest_worker_image.id}", environment=fxtr("default_worker_env"), @@ -40,6 +47,7 @@ def worker_queue(cls) -> str: volumes={"{default_worker_volume.name}": defaults.DEFAULT_WORKER_VOLUME}, wrapper_class=CeleryLatestWorkerContainer, timeout=defaults.DEFAULT_WORKER_CONTAINER_TIMEOUT, + command=CeleryLatestWorkerContainer.command(), ) @@ -48,6 +56,7 @@ def celery_latest_worker( celery_latest_worker_container: CeleryLatestWorkerContainer, celery_setup_app: Celery, ) -> CeleryTestWorker: + """Creates a pytest-celery worker node from the worker container.""" worker = CeleryTestWorker(celery_latest_worker_container, app=celery_setup_app) yield worker worker.teardown() diff --git a/t/smoke/workers/other.py b/t/smoke/workers/other.py index 28a24cb38c0..ed0f421050b 100644 --- a/t/smoke/workers/other.py +++ b/t/smoke/workers/other.py @@ -11,6 +11,8 @@ class OtherSmokeWorkerContainer(SmokeWorkerContainer): + """Alternative worker with different name and queue, but same configurations for the rest.""" + @classmethod def worker_name(cls) -> str: return "other_smoke_tests_worker" @@ -20,6 +22,7 @@ def worker_queue(cls) -> str: return "other_smoke_tests_queue" +# Build the image like the dev worker celery_other_dev_worker_image = build( path=".", dockerfile="t/smoke/workers/docker/dev", @@ -28,6 +31,7 @@ def worker_queue(cls) -> str: ) +# Define container settings like the dev worker other_dev_worker_container = container( image="{celery_other_dev_worker_image.id}", environment=fxtr("default_worker_env"), @@ -43,6 +47,7 @@ def worker_queue(cls) -> str: }, 
    wrapper_class=OtherSmokeWorkerContainer,
     timeout=defaults.DEFAULT_WORKER_CONTAINER_TIMEOUT,
+    command=OtherSmokeWorkerContainer.command(),
 )


@@ -51,6 +56,7 @@ def celery_other_dev_worker(
     other_dev_worker_container: OtherSmokeWorkerContainer,
     celery_setup_app: Celery,
 ) -> CeleryTestWorker:
+    """Creates a pytest-celery worker node from the worker container."""
     worker = CeleryTestWorker(other_dev_worker_container, app=celery_setup_app)
     yield worker
     worker.teardown()

From d0aae6550a2cd5681b276c7f4f88b2ff7bd73e54 Mon Sep 17 00:00:00 2001
From: Tomer Nosrati
Date: Wed, 17 Apr 2024 17:16:24 +0300
Subject: [PATCH 0696/1051] Added a checklist item for using pytest-celery in a
 bug report (#8971)

---
 .github/ISSUE_TEMPLATE/Bug-Report.md | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/.github/ISSUE_TEMPLATE/Bug-Report.md b/.github/ISSUE_TEMPLATE/Bug-Report.md
index b38217f9add..6ec1556e0b7 100644
--- a/.github/ISSUE_TEMPLATE/Bug-Report.md
+++ b/.github/ISSUE_TEMPLATE/Bug-Report.md
@@ -29,6 +29,7 @@ To check an item on the list replace [ ] with [x].
   to find out if the bug was already fixed in the main branch.
 - [ ] I have included all related issues and possible duplicate issues
   in this issue (If there are none, check this box anyway).
+- [ ] I have tried to reproduce the issue with [pytest-celery](https://docs.celeryq.dev/projects/pytest-celery/en/latest/userguide/celery-bug-report.html) and added the reproduction script below.

 ## Mandatory Debugging Information

@@ -137,6 +138,10 @@ We prefer submitting test cases in the form of a PR to our integration test suite.
 If you can provide one, please mention the PR number below. If not,
 please attach the most minimal code example required to reproduce the issue below.
 If the test case is too large, please include a link to a gist or a repository below.
+
+Alternatively, the pytest-celery plugin can be used to create standalone reproduction scripts
+that can be added to this report. See the pytest-celery documentation for more information at
+pytest-celery.readthedocs.io
-->
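For context on the checklist item above: a standalone pytest-celery reproduction script is an ordinary pytest file that runs the failing flow against real, dockerized workers. The following is a minimal sketch only; the ``celery_setup`` fixture and ``CeleryTestSetup`` class come from the pytest-celery plugin, while the ``add`` task is an assumed stand-in for whatever actually triggers the bug being reported.

.. code-block:: python

    from pytest_celery import CeleryTestSetup

    from t.integration.tasks import add  # assumed stand-in for the failing scenario


    def test_reproduce_issue(celery_setup: CeleryTestSetup):
        """Run the suspected flow against a real, dockerized worker."""
        # The full setup (broker, backend, worker containers) is up and ready.
        assert celery_setup.ready()
        result = add.s(1, 2).apply_async(queue=celery_setup.worker.worker_queue)
        assert result.get(timeout=60) == 3

Running the file with plain ``pytest`` spins the containers up and tears them down, so maintainers can replay the reported failure deterministically.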
From a5accc212fc456acce97131f0c79562aca083643 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 17 Apr 2024 22:42:10 +0300 Subject: [PATCH 0697/1051] Added changelog for v5.4.0 (#8973) --- Changelog.rst | 91 ++++++++++++- docs/history/index.rst | 1 + docs/history/whatsnew-5.4.rst | 233 ++++++++++++++++++++++++++++++++++ 3 files changed, 322 insertions(+), 3 deletions(-) create mode 100644 docs/history/whatsnew-5.4.rst diff --git a/Changelog.rst b/Changelog.rst index d076ba2244e..a410e35ecb9 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -5,8 +5,91 @@ ================ This document contains change notes for bugfix & new features -in the main branch & 5.3.x series, please see :ref:`whatsnew-5.3` for -an overview of what's new in Celery 5.3. +in the main branch & 5.4.x series, please see :ref:`whatsnew-5.4` for +an overview of what's new in Celery 5.4. + +.. _version-5.4.0: + +5.4.0 +===== + +:release-date: 2024-04-17 +:release-by: Tomer Nosrati + +Celery v5.4.0 and v5.3.x have consistently focused on enhancing the overall QA, both internally and externally. +This effort led to the new pytest-celery v1.0.0 release, developed concurrently with v5.3.0 & v5.4.0. + +This release introduces two significant QA enhancements: + +- **Smoke Tests**: A new layer of automatic tests has been added to Celery's standard CI. These tests are designed to handle production scenarios and complex conditions efficiently. While new contributions will not be halted due to the lack of smoke tests, we will request smoke tests for advanced changes where appropriate. +- `Standalone Bug Report Script `_: The new pytest-celery plugin now allows for encapsulating a complete Celery dockerized setup within a single pytest script. Incorporating these into new bug reports will enable us to reproduce reported bugs deterministically, potentially speeding up the resolution process. + +Contrary to the positive developments above, there have been numerous reports about issues with the Redis broker malfunctioning +upon restarts and disconnections. Our initial attempts to resolve this were not successful (#8796). +With our enhanced QA capabilities, we are now prepared to address the core issue with Redis (as a broker) again. + +The rest of the changes for this release are grouped below, with the changes from the latest release candidate listed at the end. 
+ +Changes +------- +- Add a Task class specialised for Django (#8491) +- Add Google Cloud Storage (GCS) backend (#8868) +- Added documentation to the smoke tests infra (#8970) +- Added a checklist item for using pytest-celery in a bug report (#8971) +- Bugfix: Missing id on chain (#8798) +- Bugfix: Worker not consuming tasks after Redis broker restart (#8796) +- Catch UnicodeDecodeError when opening corrupt beat-schedule.db (#8806) +- chore(ci): Enhance CI with `workflow_dispatch` for targeted debugging and testing (#8826) +- Doc: Enhance "Testing with Celery" section (#8955) +- Docfix: pip install celery[sqs] -> pip install "celery[sqs]" (#8829) +- Enable efficient `chord` when using dynamicdb as backend store (#8783) +- feat(daemon): allows daemonization options to be fetched from app settings (#8553) +- Fix DeprecationWarning: datetime.datetime.utcnow() (#8726) +- Fix recursive result parents on group in middle of chain (#8903) +- Fix typos and grammar (#8915) +- Fixed version documentation tag from #8553 in configuration.rst (#8802) +- Hotfix: Smoke tests didn't allow customizing the worker's command arguments, now it does (#8937) +- Make custom remote control commands available in CLI (#8489) +- Print safe_say() to stdout for non-error flows (#8919) +- Support moto 5.0 (#8838) +- Update contributing guide to use ssh upstream url (#8881) +- Update optimizing.rst (#8945) +- Updated concurrency docs page. (#8753) + +Dependencies Updates +-------------------- +- Bump actions/setup-python from 4 to 5 (#8701) +- Bump codecov/codecov-action from 3 to 4 (#8831) +- Bump isort from 5.12.0 to 5.13.2 (#8772) +- Bump msgpack from 1.0.7 to 1.0.8 (#8885) +- Bump mypy from 1.8.0 to 1.9.0 (#8898) +- Bump pre-commit to 3.6.1 (#8839) +- Bump pre-commit/action from 3.0.0 to 3.0.1 (#8835) +- Bump pytest from 8.0.2 to 8.1.1 (#8901) +- Bump pytest-celery to v1.0.0 (#8962) +- Bump pytest-cov to 5.0.0 (#8924) +- Bump pytest-order from 1.2.0 to 1.2.1 (#8941) +- Bump pytest-subtests from 0.11.0 to 0.12.1 (#8896) +- Bump pytest-timeout from 2.2.0 to 2.3.1 (#8894) +- Bump python-memcached from 1.59 to 1.61 (#8776) +- Bump sphinx-click from 4.4.0 to 5.1.0 (#8774) +- Update cryptography to 42.0.5 (#8869) +- Update elastic-transport requirement from <=8.12.0 to <=8.13.0 (#8933) +- Update elasticsearch requirement from <=8.12.1 to <=8.13.0 (#8934) +- Upgraded Sphinx from v5.3.0 to v7.x.x (#8803) + +Changes since 5.4.0rc2 +---------------------- +- Update elastic-transport requirement from <=8.12.0 to <=8.13.0 (#8933) +- Update elasticsearch requirement from <=8.12.1 to <=8.13.0 (#8934) +- Hotfix: Smoke tests didn't allow customizing the worker's command arguments, now it does (#8937) +- Bump pytest-celery to 1.0.0rc3 (#8946) +- Update optimizing.rst (#8945) +- Doc: Enhance "Testing with Celery" section (#8955) +- Bump pytest-celery to v1.0.0 (#8962) +- Bump pytest-order from 1.2.0 to 1.2.1 (#8941) +- Added documentation to the smoke tests infra (#8970) +- Added a checklist item for using pytest-celery in a bug report (#8971) .. _version-5.4.0rc2: @@ -33,7 +116,7 @@ an overview of what's new in Celery 5.3. 
- Docfix: pip install celery[sqs] -> pip install "celery[sqs]" (#8829) - Bump pre-commit/action from 3.0.0 to 3.0.1 (#8835) - Support moto 5.0 (#8838) -- Another fix for `link_error` signatures being `dict`s instead of `Signature`s (#8841) +- Another fix for `link_error` signatures being `dict`s instead of `Signature` s (#8841) - Bump codecov/codecov-action from 3 to 4 (#8831) - Upgrade from pytest-celery v1.0.0b1 -> v1.0.0b2 (#8843) - Bump pytest from 7.4.4 to 8.0.0 (#8823) @@ -258,6 +341,8 @@ The code changes are mostly fix for regressions. More details can be found below - Revert "Add Semgrep to CI" (#8477) - Revert "Revert "Add Semgrep to CI"" (#8478) +.. _CELERY: + .. _version-5.3.3: 5.3.3 (Yanked) diff --git a/docs/history/index.rst b/docs/history/index.rst index b0c39767826..496059e22b4 100644 --- a/docs/history/index.rst +++ b/docs/history/index.rst @@ -13,6 +13,7 @@ version please visit :ref:`changelog`. .. toctree:: :maxdepth: 2 + whatsnew-5.4 whatsnew-5.3 whatsnew-5.1 changelog-5.1 diff --git a/docs/history/whatsnew-5.4.rst b/docs/history/whatsnew-5.4.rst new file mode 100644 index 00000000000..403c3df3e4e --- /dev/null +++ b/docs/history/whatsnew-5.4.rst @@ -0,0 +1,233 @@ +.. _whatsnew-5.4: + +========================================= + What's new in Celery 5.4 (Opalescent) +========================================= +:Author: Tomer Nosrati (``tomer.nosrati at gmail.com``). + +.. sidebar:: Change history + + What's new documents describe the changes in major versions, + we also have a :ref:`changelog` that lists the changes in bugfix + releases (0.0.x), while older series are archived under the :ref:`history` + section. + +Celery is a simple, flexible, and reliable distributed programming framework +to process vast amounts of messages, while providing operations with +the tools required to maintain a distributed system with python. + +It's a task queue with focus on real-time processing, while also +supporting task scheduling. + +Celery has a large and diverse community of users and contributors, +you should come join us :ref:`on IRC ` +or :ref:`our mailing-list `. + +.. note:: + + Following the problems with Freenode, we migrated our IRC channel to Libera Chat + as most projects did. + You can also join us using `Gitter `_. + + We're sometimes there to answer questions. We welcome you to join. + +To read more about Celery you should go read the :ref:`introduction `. + +While this version is **mostly** backward compatible with previous versions +it's important that you read the following section as this release +is a new major version. + +This version is officially supported on CPython 3.8, 3.9 & 3.10 +and is also supported on PyPy3.8+. + +.. _`website`: https://docs.celeryq.dev/en/stable/ + +.. topic:: Table of Contents + + Make sure you read the important notes before upgrading to this version. + +.. contents:: + :local: + :depth: 2 + +Preface +======= + +.. note:: + + **This release contains fixes for many long standing bugs & stability issues. + We encourage our users to upgrade to this release as soon as possible.** + +The 5.4.0 release is a new feature release for Celery. + +Releases in the 5.x series are codenamed after songs of `Jon Hopkins `_. +This release has been codenamed `Opalescent `_. + +From now on we only support Python 3.8 and above. +We will maintain compatibility with Python 3.8 until it's +EOL in 2024. + +*— Tomer Nosrati* + +Long Term Support Policy +------------------------ + +We no longer support Celery 4.x as we don't have the resources to do so. 
+If you'd like to help us, all contributions are welcome.
+
+Celery 5.x **is not** an LTS release. We will support it until the release
+of Celery 6.x.
+
+We're in the process of defining our Long Term Support policy.
+Watch the next "What's New" document for updates.
+
+Wall of Contributors
+--------------------
+
+.. note::
+
+    This wall was automatically generated from git history,
+    so sadly it doesn't include the people who help with more important
+    things like answering mailing-list questions.
+
+Upgrading from Celery 4.x
+=========================
+
+Step 1: Adjust your command line invocation
+-------------------------------------------
+
+Celery 5.0 introduces a new CLI implementation which isn't completely backwards compatible.
+
+The global options can no longer be positioned after the sub-command.
+Instead, they must be positioned as an option for the `celery` command like so::
+
+    celery --app path.to.app worker
+
+If you were using our :ref:`daemonizing` guide to deploy Celery in production,
+you should revisit it for updates.
+
+Step 2: Update your configuration with the new setting names
+------------------------------------------------------------
+
+If you haven't already updated your configuration when you migrated to Celery 4.0,
+please do so now.
+
+We elected to extend the deprecation period until 6.0 since
+we did not loudly warn about using these deprecated settings.
+
+Please refer to the :ref:`migration guide ` for instructions.
+
+Step 3: Read the important notes in this document
+-------------------------------------------------
+
+Make sure you are not affected by any of the important upgrade notes
+mentioned in the :ref:`following section <v540-important>`.
+
+You should verify that none of the breaking changes in the CLI
+affect you. Please refer to :ref:`New Command Line Interface ` for details.
+
+Step 4: Migrate your code to Python 3
+-------------------------------------
+
+Celery 5.x only supports Python 3. Therefore, you must ensure your code is
+compatible with Python 3.
+
+If you haven't ported your code to Python 3, you must do so before upgrading.
+
+You can use tools like `2to3 <https://docs.python.org/3/library/2to3.html>`_
+and `pyupgrade <https://github.com/asottile/pyupgrade>`_ to assist you with
+this effort.
+
+After the migration is done, run your test suite with Celery 4 to ensure
+nothing has been broken.
+
+Step 5: Upgrade to Celery 5.4
+-----------------------------
+
+At this point you can upgrade your workers and clients with the new version.
+
+.. _v540-important:
+
+Important Notes
+===============
+
+Supported Python Versions
+-------------------------
+
+The supported Python versions are:
+
+- CPython 3.8
+- CPython 3.9
+- CPython 3.10
+- PyPy3.8 7.3.11 (``pypy3``)
+
+Experimental support
+~~~~~~~~~~~~~~~~~~~~
+
+Celery supports these Python versions provisionally as they are not production
+ready yet:
+
+- CPython 3.11
+
+Quality Improvements and Stability Enhancements
+-----------------------------------------------
+
+Celery 5.4 focuses on elevating the overall quality and stability of the project.
+We have dedicated significant efforts to address various bugs, enhance performance,
+and make improvements based on valuable user feedback.
+
+Better Compatibility and Upgrade Confidence
+-------------------------------------------
+
+Our goal with Celery 5.4 is to instill confidence in users who are currently
+using Celery 4 or older versions. We want to assure you that upgrading to
+Celery 5.4 will provide a more robust and reliable experience.
+
+Dropped support for Python 3.7
+------------------------------
+
+Celery now requires Python 3.8 and above.
+
+Python 3.7 will reach EOL in June, 2023.
+In order to focus our efforts we have dropped support for Python 3.7 in
+this version.
+
+If you still need to run Celery using Python 3.7,
+you can still use Celery 5.2.
+However, we encourage you to upgrade to a supported Python version since
+no further security patches will be applied for Python 3.7 after
+the 23rd of June, 2023.
+
+Kombu
+-----
+
+Starting from v5.4.0, the minimum required version is Kombu 5.3.
+
+Redis
+-----
+
+redis-py 4.5.x is the new minimum required version.
+
+
+SQLAlchemy
+----------
+
+SQLAlchemy 1.4.x & 2.0.x are now supported in Celery v5.4.
+
+
+Billiard
+--------
+
+The minimum required version is now 4.1.0.
+
+
+Deprecate pytz and use zoneinfo
+-------------------------------
+
+A switch has been made to zoneinfo for handling timezone data instead of pytz.
+
+Django
+------
+
+The minimum Django version is bumped to v2.2.28.
+A --skip-checks flag was also added to bypass Django core checks.

From 92514ac88afc4ccdff31f3a1018b04499607ca1e Mon Sep 17 00:00:00 2001
From: Tomer Nosrati
Date: Wed, 17 Apr 2024 23:28:20 +0300
Subject: [PATCH 0698/1051] =?UTF-8?q?Bump=20version:=205.4.0rc2=20?=
 =?UTF-8?q?=E2=86=92=205.4.0=20(#8974)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg               | 2 +-
 README.rst                     | 2 +-
 celery/__init__.py             | 2 +-
 docs/includes/introduction.txt | 2 +-
 4 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index dccdb437f6b..46fe5a41ff2 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 5.4.0rc2
+current_version = 5.4.0
 commit = True
 tag = True
 parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?P<releaselevel>[a-z\d]+)?
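For context, the ``parse`` pattern in the hunk above is a plain regular expression that tells bumpversion how to split a release string such as ``5.4.0rc2`` into its components. A quick standard-library sketch of that behavior:

.. code-block:: python

    import re

    # The same pattern as in .bumpversion.cfg above.
    VERSION_PATTERN = re.compile(
        r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?P<releaselevel>[a-z\d]+)?"
    )

    match = VERSION_PATTERN.match("5.4.0rc2")
    assert match is not None
    assert match.groupdict() == {
        "major": "5",
        "minor": "4",
        "patch": "0",
        "releaselevel": "rc2",  # None for a final release like "5.4.0"
    }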
diff --git a/README.rst b/README.rst index 6a8c12f5930..ed0e243d6c8 100644 --- a/README.rst +++ b/README.rst @@ -2,7 +2,7 @@ |build-status| |coverage| |license| |wheel| |semgrep| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge| -:Version: 5.4.0rc2 (opalescent) +:Version: 5.4.0 (opalescent) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ diff --git a/celery/__init__.py b/celery/__init__.py index 9894bc7e322..5b93aa4bf5b 100644 --- a/celery/__init__.py +++ b/celery/__init__.py @@ -17,7 +17,7 @@ SERIES = 'opalescent' -__version__ = '5.4.0rc2' +__version__ = '5.4.0' __author__ = 'Ask Solem' __contact__ = 'auvipy@gmail.com' __homepage__ = 'https://docs.celeryq.dev/' diff --git a/docs/includes/introduction.txt b/docs/includes/introduction.txt index 6de1f1f9ea0..267137202ae 100644 --- a/docs/includes/introduction.txt +++ b/docs/includes/introduction.txt @@ -1,4 +1,4 @@ -:Version: 5.4.0rc2 (opalescent) +:Version: 5.4.0 (opalescent) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ From 905ce2e923bb80cbbe28180d91342760c8c53bd3 Mon Sep 17 00:00:00 2001 From: Francesco Cataldo <78490028+FraCata00@users.noreply.github.com> Date: Sat, 20 Apr 2024 14:01:33 +0200 Subject: [PATCH 0699/1051] fix(docs): use correct version celery v.5.4.x (#8975) - new release -> https://github.com/celery/celery/releases/tag/v5.4.0 --- README.rst | 2 +- docs/django/first-steps-with-django.rst | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/README.rst b/README.rst index ed0e243d6c8..7e911dd44ec 100644 --- a/README.rst +++ b/README.rst @@ -92,7 +92,7 @@ Get Started =========== If this is the first time you're trying to use Celery, or you're -new to Celery v5.3.5 coming from previous versions then you should read our +new to Celery v5.4.x coming from previous versions then you should read our getting started tutorials: - `First steps with Celery`_ diff --git a/docs/django/first-steps-with-django.rst b/docs/django/first-steps-with-django.rst index b8a9f739e7b..7091e391c01 100644 --- a/docs/django/first-steps-with-django.rst +++ b/docs/django/first-steps-with-django.rst @@ -19,7 +19,7 @@ Using Celery with Django .. note:: - Celery 5.3.x supports Django 2.2 LTS or newer versions. + Celery 5.4.x supports Django 2.2 LTS or newer versions. Please use Celery 5.2.x for versions older than Django 2.2 or Celery 4.4.x if your Django version is older than 1.11. 
To use Celery with your Django project you must first define From 1a10133c294c659b4650df32f8dd154a41078fb4 Mon Sep 17 00:00:00 2001 From: "pyup.io bot" Date: Wed, 24 Apr 2024 10:26:02 -0700 Subject: [PATCH 0700/1051] Update mypy from 1.9.0 to 1.10.0 (#8977) --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index ddff18a7c33..87a3357af57 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -8,7 +8,7 @@ pytest-order==1.2.1 boto3>=1.26.143 moto>=4.1.11,<5.1.0 # typing extensions -mypy==1.9.0; platform_python_implementation=="CPython" +mypy==1.10.0; platform_python_implementation=="CPython" pre-commit>=3.5.0,<3.6.0; python_version < '3.9' pre-commit>=3.6.1; python_version >= '3.9' -r extras/yaml.txt From 04af085f6d21d85cecaeafc406f8c08cb12502e7 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sun, 28 Apr 2024 16:45:07 +0300 Subject: [PATCH 0701/1051] Limit pymongo<4.7 when Python <= 3.10 due to breaking changes in 4.7 (#8988) --- requirements/extras/mongodb.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/requirements/extras/mongodb.txt b/requirements/extras/mongodb.txt index 899879c628d..25b1a866336 100644 --- a/requirements/extras/mongodb.txt +++ b/requirements/extras/mongodb.txt @@ -1 +1,2 @@ -pymongo[srv]>=4.0.2 +pymongo[srv]>=4.0.2; python_version > '3.10' +pymongo[srv]<4.7; python_version <= '3.10' From 47e58f6f18eeac529c8a4f52cf50f4150cfbc893 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 28 Apr 2024 20:18:18 +0300 Subject: [PATCH 0702/1051] Bump pytest from 8.1.1 to 8.2.0 (#8987) Bumps [pytest](https://github.com/pytest-dev/pytest) from 8.1.1 to 8.2.0. - [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/8.1.1...8.2.0) --- updated-dependencies: - dependency-name: pytest dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index 87a3357af57..9c69caa3904 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,4 +1,4 @@ -pytest==8.1.1 +pytest==8.2.0 pytest-celery[all]>=1.0.0 pytest-rerunfailures==14.0 pytest-subtests==0.12.1 From 5933ddb49c6c7cae9149b02ee69f3471b4dba11f Mon Sep 17 00:00:00 2001 From: pedroimpulcetto Date: Wed, 24 Apr 2024 20:59:51 -0300 Subject: [PATCH 0703/1051] including FastAPI as a framework integration --- README.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/README.rst b/README.rst index 7e911dd44ec..c2737fb6ea2 100644 --- a/README.rst +++ b/README.rst @@ -201,6 +201,8 @@ integration packages: +--------------------+------------------------+ | `Tornado`_ | `tornado-celery`_ | +--------------------+------------------------+ + | `FastAPI`_ | not needed | + +--------------------+------------------------+ The integration packages aren't strictly necessary, but they can make development easier, and sometimes they add important hooks like closing @@ -217,6 +219,7 @@ database connections at ``fork``. .. _`web2py-celery`: https://code.google.com/p/web2py-celery/ .. _`Tornado`: https://www.tornadoweb.org/ .. 
_`tornado-celery`: https://github.com/mher/tornado-celery/ +.. _`FastAPI`: https://fastapi.tiangolo.com/ .. _celery-documentation: From bd1152c1f3326a93abb53d87317326c34b8a8723 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 29 Apr 2024 20:36:57 +0300 Subject: [PATCH 0704/1051] [pre-commit.ci] pre-commit autoupdate (#8992) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/mirrors-mypy: v1.9.0 → v1.10.0](https://github.com/pre-commit/mirrors-mypy/compare/v1.9.0...v1.10.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 2e6ce34bbc7..b0312854b68 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -30,7 +30,7 @@ repos: - id: isort - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.9.0 + rev: v1.10.0 hooks: - id: mypy pass_filenames: false From 90ff2e1290ef058e8dc59dc1d86ff38f668abc50 Mon Sep 17 00:00:00 2001 From: Bruno Alla Date: Mon, 29 Apr 2024 19:46:38 +0100 Subject: [PATCH 0705/1051] Clarify return values of ..._on_commit methods (#8984) * Clarify delay_on_commit documentation doesn't return the task ID * Update signature for delay_on_commit and apply_async_on_commit * Update tests * Update docs/django/first-steps-with-django.rst --------- Co-authored-by: Tomer Nosrati --- celery/contrib/django/task.py | 8 ++++---- docs/django/first-steps-with-django.rst | 5 +++++ t/unit/contrib/django/test_task.py | 4 ++-- 3 files changed, 11 insertions(+), 6 deletions(-) diff --git a/celery/contrib/django/task.py b/celery/contrib/django/task.py index eacc7c66471..b0dc6677553 100644 --- a/celery/contrib/django/task.py +++ b/celery/contrib/django/task.py @@ -12,10 +12,10 @@ class DjangoTask(Task): Provide a nicer API to trigger tasks at the end of the DB transaction. """ - def delay_on_commit(self, *args, **kwargs): + def delay_on_commit(self, *args, **kwargs) -> None: """Call :meth:`~celery.app.task.Task.delay` with Django's ``on_commit()``.""" - return transaction.on_commit(functools.partial(self.delay, *args, **kwargs)) + transaction.on_commit(functools.partial(self.delay, *args, **kwargs)) - def apply_async_on_commit(self, *args, **kwargs): + def apply_async_on_commit(self, *args, **kwargs) -> None: """Call :meth:`~celery.app.task.Task.apply_async` with Django's ``on_commit()``.""" - return transaction.on_commit(functools.partial(self.apply_async, *args, **kwargs)) + transaction.on_commit(functools.partial(self.apply_async, *args, **kwargs)) diff --git a/docs/django/first-steps-with-django.rst b/docs/django/first-steps-with-django.rst index 7091e391c01..5f93fb3ec63 100644 --- a/docs/django/first-steps-with-django.rst +++ b/docs/django/first-steps-with-django.rst @@ -206,6 +206,11 @@ This API takes care of wrapping the call into the `on_commit`_ hook for you. In rare cases where you want to trigger a task without waiting, the existing :meth:`~celery.app.task.Task.delay` API is still available. +One key difference compared to the ``delay`` method, is that ``delay_on_commit`` +will NOT return the task ID back to the caller. The task is not sent to the broker +when you call the method, only when the Django transaction finishes. If you need the +task ID, best to stick to :meth:`~celery.app.task.Task.delay`. 
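A small usage sketch makes the difference concrete. This is illustrative only: ``Order`` and ``send_order_email`` are hypothetical names, not part of Celery or of this patch.

.. code-block:: python

    from django.db import transaction

    from .models import Order               # hypothetical model
    from .tasks import send_order_email     # hypothetical DjangoTask-based task


    def place_order(request):
        with transaction.atomic():
            order = Order.objects.create(status="pending")
            # Enqueued only after the surrounding transaction commits.
            # Note: returns None; no AsyncResult or task ID is available here.
            send_order_email.delay_on_commit(order.pk)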
+
 This task class should be used automatically if you've followed the setup steps above.
 However, if your app :ref:`uses a custom task base class <task-custom-classes>`,
 you'll need to inherit from :class:`~celery.contrib.django.task.DjangoTask` instead of
diff --git a/t/unit/contrib/django/test_task.py b/t/unit/contrib/django/test_task.py
index 52b45b84bc4..d1efa591d2b 100644
--- a/t/unit/contrib/django/test_task.py
+++ b/t/unit/contrib/django/test_task.py
@@ -25,8 +25,8 @@ def on_commit(self):

     def test_delay_on_commit(self, task_instance, on_commit):
         result = task_instance.delay_on_commit()
-        assert result is not None
+        assert result is None

     def test_apply_async_on_commit(self, task_instance, on_commit):
         result = task_instance.apply_async_on_commit()
-        assert result is not None
+        assert result is None

From 5386e3e7772ab1d8145b8d55cb7da24594d48434 Mon Sep 17 00:00:00 2001
From: Tom Huibregtse
Date: Wed, 1 May 2024 11:35:11 -0500
Subject: [PATCH 0706/1051] add kafka broker docs (#8935)

* add kafka broker docs
* modify config options to be more accurate
* add additional documentation on findings
* update config and add limitations
* sasl

---
 .../backends-and-brokers/index.rst            |  3 +
 .../backends-and-brokers/kafka.rst            | 82 +++++++++++++++++++
 2 files changed, 85 insertions(+)
 create mode 100644 docs/getting-started/backends-and-brokers/kafka.rst

diff --git a/docs/getting-started/backends-and-brokers/index.rst b/docs/getting-started/backends-and-brokers/index.rst
index 92daf812204..0c5861fe0fb 100644
--- a/docs/getting-started/backends-and-brokers/index.rst
+++ b/docs/getting-started/backends-and-brokers/index.rst
@@ -20,6 +20,7 @@ Broker Instructions
    rabbitmq
    redis
    sqs
+   kafka

 .. _broker-overview:

@@ -41,6 +42,8 @@ individual transport (see :ref:`broker_toc`).
 +---------------+--------------+----------------+--------------------+
 | *Zookeeper*   | Experimental | No             | No                 |
 +---------------+--------------+----------------+--------------------+
+| *Kafka*       | Experimental | No             | No                 |
++---------------+--------------+----------------+--------------------+

 Experimental brokers may be functional but they don't have
 dedicated maintainers.

diff --git a/docs/getting-started/backends-and-brokers/kafka.rst b/docs/getting-started/backends-and-brokers/kafka.rst
new file mode 100644
index 00000000000..ab0627fd384
--- /dev/null
+++ b/docs/getting-started/backends-and-brokers/kafka.rst
@@ -0,0 +1,82 @@
+.. _broker-kafka:
+
+=============
+ Using Kafka
+=============
+
+.. _broker-Kafka-installation:
+
+Configuration
+=============
+
+For celeryconfig.py:
+
+..
code-block:: python + + import os + + task_serializer = 'json' + broker_transport_options = { + # "allow_create_topics": True, + } + broker_connection_retry_on_startup = True + + # For using SQLAlchemy as the backend + # result_backend = 'db+postgresql://postgres:example@localhost/postgres' + + broker_transport_options.update({ + "security_protocol": "SASL_SSL", + "sasl_mechanism": "SCRAM-SHA-512", + }) + sasl_username = os.environ["SASL_USERNAME"] + sasl_password = os.environ["SASL_PASSWORD"] + broker_url = f"confluentkafka://{sasl_username}:{sasl_password}@broker:9094" + kafka_admin_config = { + "sasl.username": sasl_username, + "sasl.password": sasl_password, + } + kafka_common_config = { + "sasl.username": sasl_username, + "sasl.password": sasl_password, + "security.protocol": "SASL_SSL", + "sasl.mechanism": "SCRAM-SHA-512", + "bootstrap_servers": "broker:9094", + } + +Please note that "allow_create_topics" is needed if the topic does not exist +yet but is not necessary otherwise. + +For tasks.py: + +.. code-block:: python + + from celery import Celery + + app = Celery('tasks') + app.config_from_object('celeryconfig') + + + @app.task + def add(x, y): + return x + y + +Auth +==== + +See above. The SASL username and password are passed in as environment variables. + +Further Info +============ + +Celery queues get routed to Kafka topics. For example, if a queue is named "add_queue", +then a topic named "add_queue" will be created/used in Kafka. + +For canvas, when using a backend that supports it, the typical mechanisms like +chain, group, and chord seem to work. + + +Limitations +=========== + +Currently, using Kafka as a broker means that only one worker can be used. +See https://github.com/celery/kombu/issues/1785. From 7ce2e41e85d77756ea269870ace6ca1c04e5ebb1 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 2 May 2024 21:23:37 +0300 Subject: [PATCH 0707/1051] Limit pymongo<4.7 regardless of Python version (#8999) --- requirements/extras/mongodb.txt | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/requirements/extras/mongodb.txt b/requirements/extras/mongodb.txt index 25b1a866336..8d80c53d0b4 100644 --- a/requirements/extras/mongodb.txt +++ b/requirements/extras/mongodb.txt @@ -1,2 +1 @@ -pymongo[srv]>=4.0.2; python_version > '3.10' -pymongo[srv]<4.7; python_version <= '3.10' +pymongo[srv]>=4.0.2, <4.7 From 078f80fd58444278cf622099d8f5cdf50fda0ee2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 3 May 2024 10:43:22 +0300 Subject: [PATCH 0708/1051] Update pymongo[srv] requirement from <4.7,>=4.0.2 to >=4.0.2,<4.8 (#9000) Updates the requirements on [pymongo[srv]](https://github.com/mongodb/mongo-python-driver) to permit the latest version. - [Release notes](https://github.com/mongodb/mongo-python-driver/releases) - [Changelog](https://github.com/mongodb/mongo-python-driver/blob/master/doc/changelog.rst) - [Commits](https://github.com/mongodb/mongo-python-driver/compare/4.0.2...4.7.1) --- updated-dependencies: - dependency-name: pymongo[srv] dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/extras/mongodb.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/mongodb.txt b/requirements/extras/mongodb.txt index 8d80c53d0b4..e7c9111e8c4 100644 --- a/requirements/extras/mongodb.txt +++ b/requirements/extras/mongodb.txt @@ -1 +1 @@ -pymongo[srv]>=4.0.2, <4.7 +pymongo[srv]>=4.0.2, <4.8 From e9ebd657b0327dde2170706d8d6b81f01e7bdad0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 3 May 2024 22:28:37 +0000 Subject: [PATCH 0709/1051] Update elasticsearch requirement from <=8.13.0 to <=8.13.1 Updates the requirements on [elasticsearch](https://github.com/elastic/elasticsearch-py) to permit the latest version. - [Release notes](https://github.com/elastic/elasticsearch-py/releases) - [Commits](https://github.com/elastic/elasticsearch-py/compare/0.4.1...v8.13.1) --- updated-dependencies: - dependency-name: elasticsearch dependency-type: direct:production ... Signed-off-by: dependabot[bot] --- requirements/extras/elasticsearch.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/elasticsearch.txt b/requirements/extras/elasticsearch.txt index d32c9d2108e..6d71aae7a47 100644 --- a/requirements/extras/elasticsearch.txt +++ b/requirements/extras/elasticsearch.txt @@ -1,2 +1,2 @@ -elasticsearch<=8.13.0 +elasticsearch<=8.13.1 elastic-transport<=8.13.0 From 91c5b902dd41e1731438a22c86a095409f4c3753 Mon Sep 17 00:00:00 2001 From: Shirsa <30934528+shirsa@users.noreply.github.com> Date: Sun, 5 May 2024 13:57:56 +0300 Subject: [PATCH 0710/1051] security: SecureSerializer: support generic low-level serializers (#8982) Co-authored-by: Asif Saif Uddin --- celery/security/serialization.py | 7 ++++--- t/unit/security/test_serialization.py | 14 +++++++++----- 2 files changed, 13 insertions(+), 8 deletions(-) diff --git a/celery/security/serialization.py b/celery/security/serialization.py index c58ef906542..937abe63c72 100644 --- a/celery/security/serialization.py +++ b/celery/security/serialization.py @@ -29,7 +29,8 @@ def serialize(self, data): assert self._cert is not None with reraise_errors('Unable to serialize: {0!r}', (Exception,)): content_type, content_encoding, body = dumps( - bytes_to_str(data), serializer=self._serializer) + data, serializer=self._serializer) + # What we sign is the serialized body, not the body itself. 
# this way the receiver doesn't have to decode the contents # to verify the signature (and thus avoiding potential flaws @@ -48,7 +49,7 @@ def deserialize(self, data): payload['signer'], payload['body']) self._cert_store[signer].verify(body, signature, self._digest) - return loads(bytes_to_str(body), payload['content_type'], + return loads(body, payload['content_type'], payload['content_encoding'], force=True) def _pack(self, body, content_type, content_encoding, signer, signature, @@ -84,7 +85,7 @@ def _unpack(self, payload, sep=str_to_bytes('\x00\x01')): 'signature': signature, 'content_type': bytes_to_str(v[0]), 'content_encoding': bytes_to_str(v[1]), - 'body': bytes_to_str(v[2]), + 'body': v[2], } diff --git a/t/unit/security/test_serialization.py b/t/unit/security/test_serialization.py index 6caf3857b81..cb16d9f14fc 100644 --- a/t/unit/security/test_serialization.py +++ b/t/unit/security/test_serialization.py @@ -16,15 +16,19 @@ class test_secureserializer(SecurityCase): - def _get_s(self, key, cert, certs): + def _get_s(self, key, cert, certs, serializer="json"): store = CertStore() for c in certs: store.add_cert(Certificate(c)) - return SecureSerializer(PrivateKey(key), Certificate(cert), store) + return SecureSerializer( + PrivateKey(key), Certificate(cert), store, serializer=serializer + ) - def test_serialize(self): - s = self._get_s(KEY1, CERT1, [CERT1]) - assert s.deserialize(s.serialize('foo')) == 'foo' + @pytest.mark.parametrize("data", [1, "foo", b"foo", {"foo": 1}]) + @pytest.mark.parametrize("serializer", ["json", "pickle"]) + def test_serialize(self, data, serializer): + s = self._get_s(KEY1, CERT1, [CERT1], serializer=serializer) + assert s.deserialize(s.serialize(data)) == data def test_deserialize(self): s = self._get_s(KEY1, CERT1, [CERT1]) From 77dbc05e5928d207c826afd09430b6172c34591d Mon Sep 17 00:00:00 2001 From: John Lewis <9gj2mk85rq@snkmail.com> Date: Sun, 5 May 2024 06:58:55 -0400 Subject: [PATCH 0711/1051] don't kill if pid same as file (#8997) (#8998) * don't kill if pid same as file (#8997) * test for don't kill if pid same as file (#8997) * restore file permission --------- Co-authored-by: Asif Saif Uddin --- celery/platforms.py | 3 +++ t/unit/utils/test_platforms.py | 9 +++++++++ 2 files changed, 12 insertions(+) diff --git a/celery/platforms.py b/celery/platforms.py index 6203f2c29b5..1375fd82c0b 100644 --- a/celery/platforms.py +++ b/celery/platforms.py @@ -186,6 +186,9 @@ def remove_if_stale(self): if not pid: self.remove() return True + if pid == os.getpid(): + # this can be common in k8s pod with PID of 1 - don't kill + return True try: os.kill(pid, 0) diff --git a/t/unit/utils/test_platforms.py b/t/unit/utils/test_platforms.py index ab1a9436543..3f4e47ae339 100644 --- a/t/unit/utils/test_platforms.py +++ b/t/unit/utils/test_platforms.py @@ -689,6 +689,15 @@ def test_remove_if_stale_no_pidfile(self): assert p.remove_if_stale() p.remove.assert_called_with() + def test_remove_if_stale_same_pid(self): + p = Pidfile('/var/pid') + p.read_pid = Mock() + p.read_pid.return_value = os.getpid() + p.remove = Mock() + + assert p.remove_if_stale() + p.remove.assert_not_called() + @patch('os.fsync') @patch('os.getpid') @patch('os.open') From a68f3aadc20dd25ffbc99d985f794d1ceaaba9af Mon Sep 17 00:00:00 2001 From: "pyup.io bot" Date: Sun, 5 May 2024 12:40:33 -0700 Subject: [PATCH 0712/1051] Update cryptography from 42.0.5 to 42.0.6 (#9005) --- requirements/extras/auth.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/auth.txt 
b/requirements/extras/auth.txt index 3beaa30e8a6..1c53c57ea4e 100644 --- a/requirements/extras/auth.txt +++ b/requirements/extras/auth.txt @@ -1 +1 @@ -cryptography==42.0.5 +cryptography==42.0.6 From 9255236afee58690a091934cdd9f9fc78a534901 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 7 May 2024 07:35:19 +0300 Subject: [PATCH 0713/1051] Bump cryptography from 42.0.6 to 42.0.7 (#9009) Bumps [cryptography](https://github.com/pyca/cryptography) from 42.0.6 to 42.0.7. - [Changelog](https://github.com/pyca/cryptography/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pyca/cryptography/compare/42.0.6...42.0.7) --- updated-dependencies: - dependency-name: cryptography dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/extras/auth.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/auth.txt b/requirements/extras/auth.txt index 1c53c57ea4e..ca19e20b484 100644 --- a/requirements/extras/auth.txt +++ b/requirements/extras/auth.txt @@ -1 +1 @@ -cryptography==42.0.6 +cryptography==42.0.7 From c251e34554bd4cb8664a532fe07780d8f17c6630 Mon Sep 17 00:00:00 2001 From: John Lewis Date: Mon, 6 May 2024 14:14:36 -0400 Subject: [PATCH 0714/1051] don't kill if pid same as file (#8997) (#8998) The pid file needs to be deleted. --- celery/platforms.py | 1 + t/unit/utils/test_platforms.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/celery/platforms.py b/celery/platforms.py index 1375fd82c0b..a9c30a3251e 100644 --- a/celery/platforms.py +++ b/celery/platforms.py @@ -188,6 +188,7 @@ def remove_if_stale(self): return True if pid == os.getpid(): # this can be common in k8s pod with PID of 1 - don't kill + self.remove() return True try: diff --git a/t/unit/utils/test_platforms.py b/t/unit/utils/test_platforms.py index 3f4e47ae339..fdac88288dc 100644 --- a/t/unit/utils/test_platforms.py +++ b/t/unit/utils/test_platforms.py @@ -696,7 +696,7 @@ def test_remove_if_stale_same_pid(self): p.remove = Mock() assert p.remove_if_stale() - p.remove.assert_not_called() + p.remove.assert_called_with() @patch('os.fsync') @patch('os.getpid') From 780d3b5c46c2eecc4e735f090070cad6b2c93539 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 9 May 2024 21:47:13 +0300 Subject: [PATCH 0715/1051] Added -vv to unit, integration and smoke tests (#9014) --- tox.ini | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tox.ini b/tox.ini index 37a568a00b2..826b7cc02d4 100644 --- a/tox.ini +++ b/tox.ini @@ -43,9 +43,9 @@ deps= bandit: bandit commands = - unit: pytest --maxfail=10 --capture=no -v --cov=celery --cov-report=xml --cov-report term {posargs} - integration: pytest -xsv t/integration {posargs} - smoke: pytest -xsv t/smoke --dist=loadscope --reruns 10 --reruns-delay 60 --rerun-except AssertionError {posargs} + unit: pytest -vv --maxfail=10 --capture=no -v --cov=celery --cov-report=xml --cov-report term {posargs} + integration: pytest -xsvv t/integration {posargs} + smoke: pytest -xsvv t/smoke --dist=loadscope --reruns 10 --reruns-delay 60 --rerun-except AssertionError {posargs} setenv = PIP_EXTRA_INDEX_URL=https://celery.github.io/celery-wheelhouse/repo/simple/ BOTO_CONFIG = /dev/null From 4cf6ba3858779a3b71c87001133506c286aec0cd Mon Sep 17 00:00:00 2001 From: Shirsa <30934528+shirsa@users.noreply.github.com> Date: Mon, 13 May 
2024 06:49:43 +0300 Subject: [PATCH 0716/1051] SecuritySerializer: ensure pack separator will not be conflicted with serialized fields (#9010) * security: ensure pack separator will not be conflicted with serialized fields * SecureSerializer: export DEFAULT_SEPARATOR to const --- celery/security/serialization.py | 42 ++++++++++----------------- t/unit/security/test_serialization.py | 6 ++-- 2 files changed, 19 insertions(+), 29 deletions(-) diff --git a/celery/security/serialization.py b/celery/security/serialization.py index 937abe63c72..7b7dc1261f8 100644 --- a/celery/security/serialization.py +++ b/celery/security/serialization.py @@ -11,6 +11,11 @@ __all__ = ('SecureSerializer', 'register_auth') +# Note: we guarantee that this value won't appear in the serialized data, +# so we can use it as a separator. +# If you change this value, make sure it's not present in the serialized data. +DEFAULT_SEPARATOR = str_to_bytes("\x00\x01") + class SecureSerializer: """Signed serializer.""" @@ -53,39 +58,22 @@ def deserialize(self, data): payload['content_encoding'], force=True) def _pack(self, body, content_type, content_encoding, signer, signature, - sep=str_to_bytes('\x00\x01')): + sep=DEFAULT_SEPARATOR): fields = sep.join( - ensure_bytes(s) for s in [signer, signature, content_type, - content_encoding, body] + ensure_bytes(s) for s in [b64encode(signer), b64encode(signature), + content_type, content_encoding, body] ) return b64encode(fields) - def _unpack(self, payload, sep=str_to_bytes('\x00\x01')): + def _unpack(self, payload, sep=DEFAULT_SEPARATOR): raw_payload = b64decode(ensure_bytes(payload)) - first_sep = raw_payload.find(sep) - - signer = raw_payload[:first_sep] - signer_cert = self._cert_store[signer] - - # shift 3 bits right to get signature length - # 2048bit rsa key has a signature length of 256 - # 4096bit rsa key has a signature length of 512 - sig_len = signer_cert.get_pubkey().key_size >> 3 - sep_len = len(sep) - signature_start_position = first_sep + sep_len - signature_end_position = signature_start_position + sig_len - signature = raw_payload[ - signature_start_position:signature_end_position - ] - - v = raw_payload[signature_end_position + sep_len:].split(sep) - + v = raw_payload.split(sep, maxsplit=4) return { - 'signer': signer, - 'signature': signature, - 'content_type': bytes_to_str(v[0]), - 'content_encoding': bytes_to_str(v[1]), - 'body': v[2], + 'signer': b64decode(v[0]), + 'signature': b64decode(v[1]), + 'content_type': bytes_to_str(v[2]), + 'content_encoding': bytes_to_str(v[3]), + 'body': v[4], } diff --git a/t/unit/security/test_serialization.py b/t/unit/security/test_serialization.py index cb16d9f14fc..5582a0be8d1 100644 --- a/t/unit/security/test_serialization.py +++ b/t/unit/security/test_serialization.py @@ -8,7 +8,7 @@ from celery.exceptions import SecurityError from celery.security.certificate import Certificate, CertStore from celery.security.key import PrivateKey -from celery.security.serialization import SecureSerializer, register_auth +from celery.security.serialization import DEFAULT_SEPARATOR, SecureSerializer, register_auth from . 
import CERT1, CERT2, KEY1, KEY2 from .case import SecurityCase @@ -24,7 +24,9 @@ def _get_s(self, key, cert, certs, serializer="json"): PrivateKey(key), Certificate(cert), store, serializer=serializer ) - @pytest.mark.parametrize("data", [1, "foo", b"foo", {"foo": 1}]) + @pytest.mark.parametrize( + "data", [1, "foo", b"foo", {"foo": 1}, {"foo": DEFAULT_SEPARATOR}] + ) @pytest.mark.parametrize("serializer", ["json", "pickle"]) def test_serialize(self, data, serializer): s = self._get_s(KEY1, CERT1, [CERT1], serializer=serializer) From 4139d7ad844598aca82f126384d9558c6ca61372 Mon Sep 17 00:00:00 2001 From: pyup-bot Date: Tue, 14 May 2024 16:42:52 +0300 Subject: [PATCH 0717/1051] Update sphinx-click from 5.1.0 to 5.2.2 --- requirements/docs.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/docs.txt b/requirements/docs.txt index d4d43fb27c2..745de87b505 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -1,7 +1,7 @@ sphinx_celery>=2.1.1 Sphinx>=7.0.0 sphinx-testing~=1.0.1 -sphinx-click==5.1.0 +sphinx-click==5.2.2 -r extras/sqlalchemy.txt -r test.txt -r deps/mock.txt From 11798fcad07a1a58e236e83506e2beeb8824136b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 15 May 2024 22:10:38 +0000 Subject: [PATCH 0718/1051] Bump sphinx-click from 5.2.2 to 6.0.0 Bumps [sphinx-click](https://github.com/click-contrib/sphinx-click) from 5.2.2 to 6.0.0. - [Release notes](https://github.com/click-contrib/sphinx-click/releases) - [Commits](https://github.com/click-contrib/sphinx-click/commits/6.0.0) --- updated-dependencies: - dependency-name: sphinx-click dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- requirements/docs.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/docs.txt b/requirements/docs.txt index 745de87b505..38f4a6a6b4c 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -1,7 +1,7 @@ sphinx_celery>=2.1.1 Sphinx>=7.0.0 sphinx-testing~=1.0.1 -sphinx-click==5.2.2 +sphinx-click==6.0.0 -r extras/sqlalchemy.txt -r test.txt -r deps/mock.txt From 83b100a1760720c05e28ba332bbb77426bf4ea64 Mon Sep 17 00:00:00 2001 From: ppawlak Date: Tue, 21 May 2024 11:04:12 +0200 Subject: [PATCH 0719/1051] =?UTF-8?q?=20Fix=20a=20typo=20to=20display=20th?= =?UTF-8?q?e=20help=20message=20in=20first-steps-with-django.rst=E2=80=A6?= =?UTF-8?q?=20=20=E2=80=A6=20documentation?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- docs/django/first-steps-with-django.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/django/first-steps-with-django.rst b/docs/django/first-steps-with-django.rst index 5f93fb3ec63..f069334caac 100644 --- a/docs/django/first-steps-with-django.rst +++ b/docs/django/first-steps-with-django.rst @@ -308,7 +308,7 @@ use the help command: .. 
code-block:: console - $ celery help + $ celery --help Where to go from here ===================== From 909d70b3bc3e9cd5420d32a385ca5701d910ab6c Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 23 May 2024 03:14:56 +0300 Subject: [PATCH 0720/1051] Pinned requests to v2.31.0 due to docker-py bug #3256 (#9039) --- requirements/test.txt | 3 +++ 1 file changed, 3 insertions(+) diff --git a/requirements/test.txt b/requirements/test.txt index 9c69caa3904..e3f7b39c287 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,3 +1,6 @@ +# Temporary fix until requests is fixed - to be reverted afterwards: +# https://github.com/docker/docker-py/issues/3256 +requests==2.31.0 pytest==8.2.0 pytest-celery[all]>=1.0.0 pytest-rerunfailures==14.0 From ee90bed1df866c8a266ac353f7e6eae8ab8a5d72 Mon Sep 17 00:00:00 2001 From: SPKorhonen Date: Thu, 23 May 2024 06:40:02 +0300 Subject: [PATCH 0721/1051] Fix certificate validity check (#9037) * Fix certificate validity check Use 'not_valid_after_utc' instead of 'not_valid_after' when checking for certificate validity to prevent errors with aware/naive datetimes Fixes error: File "<>\site-packages\celery\security\certificate.py", line 46, in has_expired return datetime.datetime.now(datetime.timezone.utc) >= self._cert.not_valid_after ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ TypeError: can't compare offset-naive and offset-aware datetimes * Fix tests --- celery/security/certificate.py | 2 +- t/unit/security/test_certificate.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/celery/security/certificate.py b/celery/security/certificate.py index 2691904d432..edaa764be5c 100644 --- a/celery/security/certificate.py +++ b/celery/security/certificate.py @@ -43,7 +43,7 @@ def __init__(self, cert: str) -> None: def has_expired(self) -> bool: """Check if the certificate has expired.""" - return datetime.datetime.now(datetime.timezone.utc) >= self._cert.not_valid_after + return datetime.datetime.now(datetime.timezone.utc) >= self._cert.not_valid_after_utc def get_pubkey(self) -> ( DSAPublicKey | EllipticCurvePublicKey | Ed448PublicKey | Ed25519PublicKey | RSAPublicKey diff --git a/t/unit/security/test_certificate.py b/t/unit/security/test_certificate.py index 68b05fa03ee..4c72a1d6812 100644 --- a/t/unit/security/test_certificate.py +++ b/t/unit/security/test_certificate.py @@ -41,7 +41,7 @@ def test_has_expired_mock(self): x._cert = Mock(name='cert') time_after = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(days=-1) - x._cert.not_valid_after = time_after + x._cert.not_valid_after_utc = time_after assert x.has_expired() is True @@ -50,7 +50,7 @@ def test_has_not_expired_mock(self): x._cert = Mock(name='cert') time_after = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(days=1) - x._cert.not_valid_after = time_after + x._cert.not_valid_after_utc = time_after assert x.has_expired() is False From a95e626a7dddc3c05cf97eca44b0c2aff1fc9a55 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Fri, 24 May 2024 18:51:16 +0300 Subject: [PATCH 0722/1051] Revert "Pinned requests to v2.31.0 due to docker-py bug #3256 (#9039)" (#9043) This reverts commit 909d70b3bc3e9cd5420d32a385ca5701d910ab6c.
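On the certificate fix in PATCH 0721 above: the change works because ``not_valid_after_utc`` returns a timezone-aware datetime. A minimal standalone sketch of the corrected comparison — assuming a ``cryptography`` (>= 42.0) ``x509.Certificate`` object obtained elsewhere:

.. code-block:: python

    import datetime

    from cryptography import x509

    def has_expired(cert: x509.Certificate) -> bool:
        # Both operands are timezone-aware, so the comparison cannot raise
        # the "offset-naive and offset-aware" TypeError quoted above.
        now = datetime.datetime.now(datetime.timezone.utc)
        return now >= cert.not_valid_after_utc

The older ``not_valid_after`` property returns a naive datetime, which is why comparing it against an aware "now" failed.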
--- requirements/test.txt | 3 --- 1 file changed, 3 deletions(-) diff --git a/requirements/test.txt b/requirements/test.txt index e3f7b39c287..9c69caa3904 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,6 +1,3 @@ -# Temporary fix until requests is fixed - to be reverted afterwards: -# https://github.com/docker/docker-py/issues/3256 -requests==2.31.0 pytest==8.2.0 pytest-celery[all]>=1.0.0 pytest-rerunfailures==14.0 From b2391b419f52cafacbf3c2f665cbd664f54530a2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 24 May 2024 20:12:40 +0300 Subject: [PATCH 0723/1051] --- (#9035) updated-dependencies: - dependency-name: pytest dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index 9c69caa3904..5a33d88adfd 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,4 +1,4 @@ -pytest==8.2.0 +pytest==8.2.1 pytest-celery[all]>=1.0.0 pytest-rerunfailures==14.0 pytest-subtests==0.12.1 From 448ada2a0e905917def48c20abfe04a566c01fc0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 25 May 2024 19:34:19 +0300 Subject: [PATCH 0724/1051] Update elasticsearch requirement from <=8.13.1 to <=8.13.2 (#9045) Updates the requirements on [elasticsearch](https://github.com/elastic/elasticsearch-py) to permit the latest version. - [Release notes](https://github.com/elastic/elasticsearch-py/releases) - [Commits](https://github.com/elastic/elasticsearch-py/compare/0.4.1...v8.13.2) --- updated-dependencies: - dependency-name: elasticsearch dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/extras/elasticsearch.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/elasticsearch.txt b/requirements/extras/elasticsearch.txt index 6d71aae7a47..bd1057f2a56 100644 --- a/requirements/extras/elasticsearch.txt +++ b/requirements/extras/elasticsearch.txt @@ -1,2 +1,2 @@ -elasticsearch<=8.13.1 +elasticsearch<=8.13.2 elastic-transport<=8.13.0 From ae5eeb08a1052ad6c32990c69dcd2ed07696fd73 Mon Sep 17 00:00:00 2001 From: Bruno Alla Date: Mon, 27 May 2024 06:09:10 +0100 Subject: [PATCH 0725/1051] Fix detection of custom task set as class attribute with Django (#9038) * Add test case for customized task as class attribute with Django As reported in https://github.com/celery/celery/pull/8491#issuecomment-2119191201 * Fix detection of customized task as class attribute with Django --------- Co-authored-by: Asif Saif Uddin --- celery/app/base.py | 7 ++++++- t/unit/fixups/test_django.py | 21 +++++++++++++++++++-- 2 files changed, 25 insertions(+), 3 deletions(-) diff --git a/celery/app/base.py b/celery/app/base.py index 863f264f854..63f3d54abec 100644 --- a/celery/app/base.py +++ b/celery/app/base.py @@ -240,7 +240,12 @@ def __init__(self, main=None, loader=None, backend=None, self.loader_cls = loader or self._get_default_loader() self.log_cls = log or self.log_cls self.control_cls = control or self.control_cls - self._custom_task_cls_used = bool(task_cls) + self._custom_task_cls_used = ( + # Custom task class provided as argument + bool(task_cls) + # subclass of Celery with a task_cls attribute + or self.__class__ is not Celery and hasattr(self.__class__, 'task_cls') + ) self.task_cls = task_cls or self.task_cls self.set_as_current = set_as_current self.registry_cls = symbol_by_name(self.registry_cls) diff --git a/t/unit/fixups/test_django.py b/t/unit/fixups/test_django.py index b25bf0879b5..72b4d60d873 100644 --- a/t/unit/fixups/test_django.py +++ b/t/unit/fixups/test_django.py @@ -103,7 +103,7 @@ def test_install(self, patching, module): self.sigs.worker_init.connect.assert_called_with(f.on_worker_init) assert self.app.loader.now == f.now - # Specialized Task class is used + # Specialized DjangoTask class is used assert self.app.task_cls == 'celery.contrib.django.task:DjangoTask' from celery.contrib.django.task import DjangoTask assert issubclass(f.app.Task, DjangoTask) @@ -120,9 +120,26 @@ def test_install_custom_user_task(self, patching): with self.fixup_context(self.app) as (f, _, _): f.install() - # Specialized Task class is NOT used + # Specialized DjangoTask class is NOT used, + # The one from the user's class is assert self.app.task_cls == 'myapp.celery.tasks:Task' + def test_install_custom_user_task_as_class_attribute(self, patching): + patching('celery.fixups.django.signals') + + from celery.app import Celery + + class MyCeleryApp(Celery): + task_cls = 'myapp.celery.tasks:Task' + + app = MyCeleryApp('mytestapp') + + with self.fixup_context(app) as (f, _, _): + f.install() + # Specialized DjangoTask class is NOT used, + # The one from the user's class is + assert app.task_cls == 'myapp.celery.tasks:Task' + def test_now(self): with self.fixup_context(self.app) as (f, _, _): assert f.now(utc=True) From 0defd810bc7ae95a28093197feecf3a68a169bf2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 29 May 2024 04:29:37 +0300 Subject: [PATCH 0726/1051] Update 
elastic-transport requirement from <=8.13.0 to <=8.13.1 (#9050) Updates the requirements on [elastic-transport](https://github.com/elastic/elastic-transport-python) to permit the latest version. - [Release notes](https://github.com/elastic/elastic-transport-python/releases) - [Changelog](https://github.com/elastic/elastic-transport-python/blob/main/CHANGELOG.md) - [Commits](https://github.com/elastic/elastic-transport-python/compare/0.1.0b0...v8.13.1) --- updated-dependencies: - dependency-name: elastic-transport dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/extras/elasticsearch.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/elasticsearch.txt b/requirements/extras/elasticsearch.txt index bd1057f2a56..3d781e6b9bf 100644 --- a/requirements/extras/elasticsearch.txt +++ b/requirements/extras/elasticsearch.txt @@ -1,2 +1,2 @@ elasticsearch<=8.13.2 -elastic-transport<=8.13.0 +elastic-transport<=8.13.1 From 21548ba81363d444878b67bdb8dddaf073e93eb8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 30 May 2024 12:19:07 +0300 Subject: [PATCH 0727/1051] Bump pycouchdb from 1.14.2 to 1.16.0 (#9052) Bumps [pycouchdb](https://github.com/histrio/py-couchdb) from 1.14.2 to 1.16.0. - [Release notes](https://github.com/histrio/py-couchdb/releases) - [Changelog](https://github.com/histrio/py-couchdb/blob/master/CHANGES.rst) - [Commits](https://github.com/histrio/py-couchdb/compare/v1.14.2...v1.16.0) --- updated-dependencies: - dependency-name: pycouchdb dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/extras/couchdb.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/couchdb.txt b/requirements/extras/couchdb.txt index 3942c0d775f..083cca9d1f9 100644 --- a/requirements/extras/couchdb.txt +++ b/requirements/extras/couchdb.txt @@ -1 +1 @@ -pycouchdb==1.14.2 +pycouchdb==1.16.0 From 54f38249f0d85ef36a924f13a997167e8d2f16a9 Mon Sep 17 00:00:00 2001 From: pyup-bot Date: Tue, 4 Jun 2024 22:05:53 +0300 Subject: [PATCH 0728/1051] Update pytest from 8.2.1 to 8.2.2 --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index 5a33d88adfd..bb4464df96d 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,4 +1,4 @@ -pytest==8.2.1 +pytest==8.2.2 pytest-celery[all]>=1.0.0 pytest-rerunfailures==14.0 pytest-subtests==0.12.1 From 50a4d41a06d2ffcaa52ba7ceb158a46e8dc3f989 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 6 Jun 2024 19:56:24 +0300 Subject: [PATCH 0729/1051] Bump cryptography from 42.0.7 to 42.0.8 (#9061) Bumps [cryptography](https://github.com/pyca/cryptography) from 42.0.7 to 42.0.8. - [Changelog](https://github.com/pyca/cryptography/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pyca/cryptography/compare/42.0.7...42.0.8) --- updated-dependencies: - dependency-name: cryptography dependency-type: direct:production update-type: version-update:semver-patch ... 
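Returning to the Django fix in PATCH 0725 above: custom task classes are now detected whether they are passed to the constructor or declared on a subclass. A sketch of the two spellings the detection covers — the ``myapp.celery.tasks:Task`` path mirrors the patch's own tests and is illustrative only:

.. code-block:: python

    from celery import Celery

    # Passing task_cls as a constructor argument was already detected.
    app = Celery("proj", task_cls="myapp.celery.tasks:Task")

    # Declaring it as a class attribute on a Celery subclass is now
    # detected too, so the Django fixup will not override it.
    class MyCeleryApp(Celery):
        task_cls = "myapp.celery.tasks:Task"

    app2 = MyCeleryApp("proj")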
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/extras/auth.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/auth.txt b/requirements/extras/auth.txt index ca19e20b484..a7ee686f2d5 100644 --- a/requirements/extras/auth.txt +++ b/requirements/extras/auth.txt @@ -1 +1 @@ -cryptography==42.0.7 +cryptography==42.0.8 From cc304b251ba3eab29865db0fc4d4a6c1a9ee72a3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 7 Jun 2024 02:32:20 +0300 Subject: [PATCH 0730/1051] Update elasticsearch requirement from <=8.13.2 to <=8.14.0 (#9069) Updates the requirements on [elasticsearch](https://github.com/elastic/elasticsearch-py) to permit the latest version. - [Release notes](https://github.com/elastic/elasticsearch-py/releases) - [Commits](https://github.com/elastic/elasticsearch-py/compare/0.4.1...v8.14.0) --- updated-dependencies: - dependency-name: elasticsearch dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/extras/elasticsearch.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/elasticsearch.txt b/requirements/extras/elasticsearch.txt index 3d781e6b9bf..0a32eaf08d5 100644 --- a/requirements/extras/elasticsearch.txt +++ b/requirements/extras/elasticsearch.txt @@ -1,2 +1,2 @@ -elasticsearch<=8.13.2 +elasticsearch<=8.14.0 elastic-transport<=8.13.1 From 9980db25413f795f5dd82f44f2dcf5e0817e4f8e Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 10 Jun 2024 20:29:48 +0300 Subject: [PATCH 0731/1051] [pre-commit.ci] pre-commit autoupdate (#9071) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/asottile/pyupgrade: v3.15.2 → v3.16.0](https://github.com/asottile/pyupgrade/compare/v3.15.2...v3.16.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b0312854b68..63ed85c214a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/asottile/pyupgrade - rev: v3.15.2 + rev: v3.16.0 hooks: - id: pyupgrade args: ["--py38-plus"] From 8908f4caa0f3a50264785bd437eead9a747b416b Mon Sep 17 00:00:00 2001 From: farahats9 Date: Tue, 11 Jun 2024 15:48:31 +0300 Subject: [PATCH 0732/1051] [enhance feature] Crontab schedule: allow using month names (#9068) * Update schedules.py * Update time.py * added unit tests * remove whitespace for linter --------- Co-authored-by: Asif Saif Uddin --- celery/schedules.py | 9 ++++++--- celery/utils/time.py | 18 ++++++++++++++++++ t/unit/app/test_schedules.py | 21 ++++++++++++++++++++- 3 files changed, 44 insertions(+), 4 deletions(-) diff --git a/celery/schedules.py b/celery/schedules.py index b35436ae74e..a60dd27ba3b 100644 --- a/celery/schedules.py +++ b/celery/schedules.py @@ -15,7 +15,7 @@ from . 
import current_app from .utils.collections import AttributeDict from .utils.time import (ffwd, humanize_seconds, localize, maybe_make_aware, maybe_timedelta, remaining, timezone, - weekday) + weekday, yearmonth) __all__ = ( 'ParseException', 'schedule', 'crontab', 'crontab_parser', @@ -300,9 +300,12 @@ def _expand_number(self, s: str) -> int: i = int(s) except ValueError: try: - i = weekday(s) + i = yearmonth(s) except KeyError: - raise ValueError(f'Invalid weekday literal {s!r}.') + try: + i = weekday(s) + except KeyError: + raise ValueError(f'Invalid weekday literal {s!r}.') max_val = self.min_ + self.max_ - 1 if i > max_val: diff --git a/celery/utils/time.py b/celery/utils/time.py index d27615cc10e..2c14db29d30 100644 --- a/celery/utils/time.py +++ b/celery/utils/time.py @@ -41,6 +41,9 @@ DAYNAMES = 'sun', 'mon', 'tue', 'wed', 'thu', 'fri', 'sat' WEEKDAYS = dict(zip(DAYNAMES, range(7))) +MONTHNAMES = 'jan', 'feb', 'mar', 'apr', 'may', 'jun', 'jul', 'aug', 'sep', 'oct', 'nov', 'dec' +YEARMONTHS = dict(zip(MONTHNAMES, range(1, 13))) + RATE_MODIFIER_MAP = { 's': lambda n: n, 'm': lambda n: n / 60.0, @@ -258,6 +261,21 @@ def weekday(name: str) -> int: raise KeyError(name) +def yearmonth(name: str) -> int: + """Return the position of a month: 1 - 12, where 1 is January. + + Example: + >>> yearmonth('january'), yearmonth('jan'), yearmonth('may') + (1, 1, 5) + """ + abbreviation = name[0:3].lower() + try: + return YEARMONTHS[abbreviation] + except KeyError: + # Show original month name in exception, instead of abbr. + raise KeyError(name) + + def humanize_seconds( secs: int, prefix: str = '', sep: str = '', now: str = 'now', microseconds: bool = False) -> str: diff --git a/t/unit/app/test_schedules.py b/t/unit/app/test_schedules.py index e5a7bfb7bdd..b9285e64d93 100644 --- a/t/unit/app/test_schedules.py +++ b/t/unit/app/test_schedules.py @@ -308,6 +308,20 @@ def test_not_weekday(self): ) assert next == datetime(2010, 9, 13, 0, 5) + def test_monthyear(self): + next = self.next_occurrence( + self.crontab(minute=30, hour=14, month_of_year='oct', day_of_month=18), + datetime(2010, 9, 11, 14, 30, 15), + ) + assert next == datetime(2010, 10, 18, 14, 30) + + def test_not_monthyear(self): + next = self.next_occurrence( + self.crontab(minute=[5, 42], month_of_year='nov-dec', day_of_month=13), + datetime(2010, 9, 11, 14, 30, 15), + ) + assert next == datetime(2010, 11, 13, 0, 5) + def test_monthday(self): next = self.next_occurrence( self.crontab(minute=30, hour=14, day_of_month=18), @@ -607,6 +621,11 @@ def test_crontab_spec_invalid_dom(self, day_of_month): @pytest.mark.parametrize('month_of_year,expected', [ (1, {1}), ('1', {1}), + ('feb', {2}), + ('Mar', {3}), + ('april', {4}), + ('may,jun,jul', {5, 6, 7}), + ('aug-oct', {8, 9, 10}), ('2,4,6', {2, 4, 6}), ('*/2', {1, 3, 5, 7, 9, 11}), ('2-12/2', {2, 4, 6, 8, 10, 12}), @@ -615,7 +634,7 @@ def test_crontab_spec_moy_formats(self, month_of_year, expected): c = self.crontab(month_of_year=month_of_year) assert c.month_of_year == expected - @pytest.mark.parametrize('month_of_year', [0, '0-5', 13, '12,13']) + @pytest.mark.parametrize('month_of_year', [0, '0-5', 13, '12,13', 'jaan', 'sebtember']) def test_crontab_spec_invalid_moy(self, month_of_year): with pytest.raises(ValueError): self.crontab(month_of_year=month_of_year) From 6f6b0ab84d8fe68b1d8f040f578299af5aa1d468 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 11 Jun 2024 16:55:16 +0300 Subject: [PATCH 0733/1051] Enhance tox environment: [testenv:clean] (#9072) --- Makefile | 2 +- tox.ini | 10 
+++++----- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/Makefile b/Makefile index 5342986415c..4ca210d1d98 100644 --- a/Makefile +++ b/Makefile @@ -139,7 +139,7 @@ clean-pyc: removepyc: clean-pyc clean-build: - rm -rf build/ dist/ .eggs/ *.egg-info/ .tox/ .coverage cover/ + rm -rf build/ dist/ .eggs/ *.egg-info/ .coverage cover/ clean-git: $(GIT) clean -xdn diff --git a/tox.ini b/tox.ini index 826b7cc02d4..d31c7b2932f 100644 --- a/tox.ini +++ b/tox.ini @@ -120,13 +120,13 @@ commands = pre-commit {posargs:run --all-files --show-diff-on-failure} [testenv:clean] -allowlist_externals = bash -commands_pre = - pip install cleanpy +deps = cleanpy +allowlist_externals = bash, make, rm commands = - python -m cleanpy . bash -c 'files=$(find . -name "*.coverage*" -type f); if [ -n "$files" ]; then echo "Removed coverage file(s):"; echo "$files" | tr " " "\n"; rm $files; fi' bash -c 'containers=$(docker ps -aq --filter label=creator=pytest-docker-tools); if [ -n "$containers" ]; then echo "Removed Docker container(s):"; docker rm -f $containers; fi' bash -c 'networks=$(docker network ls --filter name=pytest- -q); if [ -n "$networks" ]; then echo "Removed Docker network(s):"; docker network rm $networks; fi' bash -c 'volumes=$(docker volume ls --filter name=pytest- -q); if [ -n "$volumes" ]; then echo "Removed Docker volume(s):"; docker volume rm $volumes; fi' - + python -m cleanpy . + make clean + rm -f test.db statefilename.db From d903c7a225bf278ea1918c851a8dad126d899900 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20Enrique=20Carrillo=20Pino?= Date: Wed, 12 Jun 2024 17:20:37 -0500 Subject: [PATCH 0734/1051] Clarify docs about Reserve one task at a time --- docs/userguide/optimizing.rst | 28 ++++++++++++++++++---------- 1 file changed, 18 insertions(+), 10 deletions(-) diff --git a/docs/userguide/optimizing.rst b/docs/userguide/optimizing.rst index 346f7374794..72ce4dc77cb 100644 --- a/docs/userguide/optimizing.rst +++ b/docs/userguide/optimizing.rst @@ -148,6 +148,15 @@ The task message is only deleted from the queue after the task is :term:`acknowledged`, so if the worker crashes before acknowledging the task, it can be redelivered to another worker (or the same after recovery). +Note that an exception is considered normal operation in Celery and it will be acknowledged. +Acknowledgments are really used to safeguard against failures that can not be normally +handled by the Python exception system (i.e. power failure, memory corruption, hardware failure, fatal signal, etc.). +For normal exceptions you should use task.retry() to retry the task. + +.. seealso:: + + Notes at :ref:`faq-acks_late-vs-retry`. + When using the default of early acknowledgment, having a prefetch multiplier setting of *one*, means the worker will reserve at most one extra task for every worker process: or in other words, if the worker is started with tasks (10 acknowledged tasks executing, and 10 unacknowledged reserved tasks) at any time. -Often users ask if disabling "prefetching of tasks" is possible, and it is +possible with a catch. 
You can have a worker only reserve as many tasks as +there are worker processes, with the condition that they are acknowledged +late (10 unacknowledged tasks executing for :option:`-c 10 `) -That's possible, but not without also enabling -:term:`late acknowledgment`. Using this option over the +For that, you need to enable :term:`late acknowledgment`. Using this option over the default behavior means a task that's already started executing will be retried in the event of a power failure or the worker instance being killed abruptly, so this also means the task must be :term:`idempotent` -.. seealso:: - - Notes at :ref:`faq-acks_late-vs-retry`. - You can enable this behavior by using the following configuration options: .. code-block:: python @@ -177,6 +181,10 @@ You can enable this behavior by using the following configuration options: task_acks_late = True worker_prefetch_multiplier = 1 +If you want to disable "prefetching of tasks" without using ack_late (because +your tasks are not idempotent) that's impossible right now and you can join the +discussion here https://github.com/celery/celery/discussions/7106 + Memory Usage ------------ From f9ca3d0d44e3d7374e96d6fcad9e670456724ef7 Mon Sep 17 00:00:00 2001 From: Ben Lewis Date: Sat, 15 Jun 2024 15:47:23 +0300 Subject: [PATCH 0735/1051] GCS docs fixes (#9075) * Fix `gcs://` typo (it should be `gs://`) * Fix `gcs_project` in URI version of `gs://` too --- docs/userguide/configuration.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst index 00893d4e230..f5c3f280aa4 100644 --- a/docs/userguide/configuration.rst +++ b/docs/userguide/configuration.rst @@ -1823,7 +1823,7 @@ GCS backend settings GCS could be configured via the URL provided in :setting:`result_backend`, for example:: - result_backend = 'gcs://mybucket/some-prefix?project=myproject&ttl=600' + result_backend = 'gs://mybucket/some-prefix?gcs_project=myproject&ttl=600' This backend requires the following configuration directives to be set: From 4c3e5a1a25ee7d67cc50d9db560e0102e0b06c23 Mon Sep 17 00:00:00 2001 From: Idan Haim Shalom Date: Sun, 16 Jun 2024 09:48:28 +0300 Subject: [PATCH 0736/1051] Use hub.remove_writer instead of hub.remove for write fds (#4185) (#9055) - fix main process Unrecoverable error: AssertionError() when read fd is deleted - see https://github.com/celery/celery/issues/4185#issuecomment-2139390090 - tests: - change hub.remove to hub.remove_writer in test_poll_write_generator and test_poll_write_generator_stopped - add 3 more tests for schedule_writes to assert only hub.writers is removed when hub.readers have the same fd id Co-authored-by: Idan Haim Shalom Co-authored-by: Tomer Nosrati --- celery/concurrency/asynpool.py | 8 +-- t/unit/concurrency/test_prefork.py | 96 ++++++++++++++++++++++++++++++ t/unit/worker/test_loops.py | 6 +- 3 files changed, 103 insertions(+), 7 deletions(-) diff --git a/celery/concurrency/asynpool.py b/celery/concurrency/asynpool.py index e1912b05b7a..7f51307c6c4 100644 --- a/celery/concurrency/asynpool.py +++ b/celery/concurrency/asynpool.py @@ -772,7 +772,7 @@ def on_poll_start(): None, WRITE | ERR, consolidate=True) else: iterate_file_descriptors_safely( - inactive, all_inqueues, hub_remove) + inactive, all_inqueues, hub.remove_writer) self.on_poll_start = on_poll_start def on_inqueue_close(fd, proc): @@ -818,7 +818,7 @@ def schedule_writes(ready_fds, total_write_count=None): # worker is already busy with another task continue if ready_fd not in 
all_inqueues: - hub_remove(ready_fd) + hub.remove_writer(ready_fd) continue try: job = pop_message() @@ -829,7 +829,7 @@ ... # this may create a spinloop where the event loop # always wakes up. for inqfd in diff(active_writes): - hub_remove(inqfd) + hub.remove_writer(inqfd) break else: @@ -927,7 +927,7 @@ def _write_job(proc, fd, job): else: errors = 0 finally: - hub_remove(fd) + hub.remove_writer(fd) write_stats[proc.index] += 1 # message written, so this fd is now available active_writes.discard(fd) diff --git a/t/unit/concurrency/test_prefork.py b/t/unit/concurrency/test_prefork.py index 7690ef09a40..eda7cee519f 100644 --- a/t/unit/concurrency/test_prefork.py +++ b/t/unit/concurrency/test_prefork.py @@ -5,6 +5,8 @@ from unittest.mock import Mock, patch import pytest +from billiard.pool import ApplyResult +from kombu.asynchronous import Hub import t.skip from celery.app.defaults import DEFAULTS @@ -354,6 +356,100 @@ def _fake_hub(*args, **kwargs): # Then: all items were removed from the managed data source assert fd_iter == {}, "Expected all items removed from managed dict" + def _get_hub(self): + hub = Hub() + hub.readers = {} + hub.writers = {} + hub.timer = Mock(name='hub.timer') + hub.timer._queue = [Mock()] + hub.fire_timers = Mock(name='hub.fire_timers') + hub.fire_timers.return_value = 1.7 + hub.poller = Mock(name='hub.poller') + hub.close = Mock(name='hub.close()') + return hub + + def test_schedule_writes_hub_remove_writer_ready_fd_not_in_all_inqueues(self): + pool = asynpool.AsynPool(threads=False) + hub = self._get_hub() + + writer = Mock(name='writer') + reader = Mock(name='reader') + + # add 2 fake fds with the same id + hub.add_reader(6, reader, 6) + hub.add_writer(6, writer, 6) + pool._all_inqueues.clear() + pool._create_write_handlers(hub) + + # check that schedule_writes removes the writer fd but not the reader one from the hub. 
+ hub.consolidate_callback(ready_fds=[6]) + assert 6 in hub.readers + assert 6 not in hub.writers + + def test_schedule_writes_hub_remove_writers_from_active_writers_when_get_index_error(self): + pool = asynpool.AsynPool(threads=False) + hub = self._get_hub() + + writer = Mock(name='writer') + reader = Mock(name='reader') + + # add 3 fake fds with the same id to reader and writer + hub.add_reader(6, reader, 6) + hub.add_reader(8, reader, 8) + hub.add_reader(9, reader, 9) + hub.add_writer(6, writer, 6) + hub.add_writer(8, writer, 8) + hub.add_writer(9, writer, 9) + + # add fake fd to pool _all_inqueues to make sure we try to read from outbound_buffer + # set active_writes to 6 to make sure we remove all write fds except 6 + pool._active_writes = {6} + pool._all_inqueues = {2, 6, 8, 9} + + pool._create_write_handlers(hub) + + # clear outbound_buffer to get IndexError when trying to pop any message + # in this case all active_writers fds will be removed from the hub + pool.outbound_buffer.clear() + + hub.consolidate_callback(ready_fds=[2]) + if {6, 8, 9} <= hub.readers.keys() and not {8, 9} <= hub.writers.keys(): + assert True + else: + assert False + + assert 6 in hub.writers + + def test_schedule_writes_hub_remove_fd_only_from_writers_when_write_job_is_done(self): + pool = asynpool.AsynPool(threads=False) + hub = self._get_hub() + + writer = Mock(name='writer') + reader = Mock(name='reader') + + # add one writer and one reader with the same fd + hub.add_writer(2, writer, 2) + hub.add_reader(2, reader, 2) + assert 2 in hub.writers + + # For test purposes to reach _write_job in schedule writes + pool._all_inqueues = {2} + worker = Mock("worker") + # this lambda need to return a number higher than 4 + # to pass the while loop in _write_job function and to reach the hub.remove_writer + worker.send_job_offset = lambda header, HW: 5 + + pool._fileno_to_inq[2] = worker + pool._create_write_handlers(hub) + + result = ApplyResult({}, lambda x: True) + result._payload = [None, None, -1] + pool.outbound_buffer.appendleft(result) + + hub.consolidate_callback(ready_fds=[2]) + assert 2 not in hub.writers + assert 2 in hub.readers + def test_register_with_event_loop__no_on_tick_dupes(self): """Ensure AsynPool's register_with_event_loop only registers on_poll_start in the event loop the first time it's called. 
This diff --git a/t/unit/worker/test_loops.py b/t/unit/worker/test_loops.py index 68e84562b4c..754a3a119c7 100644 --- a/t/unit/worker/test_loops.py +++ b/t/unit/worker/test_loops.py @@ -363,7 +363,7 @@ def test_poll_err_writable(self): def test_poll_write_generator(self): x = X(self.app) - x.hub.remove = Mock(name='hub.remove()') + x.hub.remove_writer = Mock(name='hub.remove_writer()') def Gen(): yield 1 @@ -376,7 +376,7 @@ def Gen(): with pytest.raises(socket.error): asynloop(*x.args) assert gen.gi_frame.f_lasti != -1 - x.hub.remove.assert_not_called() + x.hub.remove_writer.assert_not_called() def test_poll_write_generator_stopped(self): x = X(self.app) @@ -388,7 +388,7 @@ def Gen(): x.hub.add_writer(6, gen) x.hub.on_tick.add(x.close_then_error(Mock(name='tick'), 2)) x.hub.poller.poll.return_value = [(6, WRITE)] - x.hub.remove = Mock(name='hub.remove()') + x.hub.remove_writer = Mock(name='hub.remove_writer()') with pytest.raises(socket.error): asynloop(*x.args) assert gen.gi_frame is None From 5b33bc16bfcc4ebc64cacadc2814cd6a6419c06a Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 17 Jun 2024 20:36:49 +0300 Subject: [PATCH 0737/1051] [pre-commit.ci] pre-commit autoupdate (#9077) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/PyCQA/flake8: 7.0.0 → 7.1.0](https://github.com/PyCQA/flake8/compare/7.0.0...7.1.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 63ed85c214a..8c8ffa517dd 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -6,7 +6,7 @@ repos: args: ["--py38-plus"] - repo: https://github.com/PyCQA/flake8 - rev: 7.0.0 + rev: 7.1.0 hooks: - id: flake8 From 20c15764b084d0e0d4459faed30fc6bf989c0b4c Mon Sep 17 00:00:00 2001 From: Johannes Faigle Date: Wed, 19 Jun 2024 10:06:44 +0200 Subject: [PATCH 0738/1051] Class method to process crontab string (#9079) * add class method to process crontab string * typo typo typo * Update celery/schedules.py Co-authored-by: Johannes Faigle * Add tests for crontab.from_string Plus pacify mypy * Update docs * Update contributors.txt --------- Co-authored-by: alex.pajak@gmail.com Co-authored-by: Asif Saif Uddin Co-authored-by: Tomer Nosrati --- CONTRIBUTORS.txt | 3 ++- celery/schedules.py | 17 +++++++++++++++ t/unit/app/test_schedules.py | 16 ++++++++++++++ 3 files changed, 35 insertions(+), 1 deletion(-) diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 6159effcc3a..184a2538e5a 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -296,4 +296,5 @@ Kaustav Banerjee, 2022/11/10 Austin Snoeyink 2022/12/06 Jeremy Z. Othieno 2023/07/27 Tomer Nosrati, 2022/17/07 -Andy Zickler, 2024/01/18 \ No newline at end of file +Andy Zickler, 2024/01/18 +Johannes Faigle, 2024/06/18 diff --git a/celery/schedules.py b/celery/schedules.py index a60dd27ba3b..9cd051004e7 100644 --- a/celery/schedules.py +++ b/celery/schedules.py @@ -411,6 +411,23 @@ def __init__(self, minute: str = '*', hour: str = '*', day_of_week: str = '*', self.month_of_year = self._expand_cronspec(month_of_year, 12, 1) super().__init__(**kwargs) + @classmethod + def from_string(cls, crontab: str) -> crontab: + """ + Create a Crontab from a cron expression string. For example ``crontab.from_string('* * * * *')``. + + .. 
code-block:: text + + ┌───────────── minute (0–59) + │ ┌───────────── hour (0–23) + │ │ ┌───────────── day of the month (1–31) + │ │ │ ┌───────────── month (1–12) + │ │ │ │ ┌───────────── day of the week (0–6) (Sunday to Saturday) + * * * * * + """ + minute, hour, day_of_month, month_of_year, day_of_week = crontab.split(" ") + return cls(minute, hour, day_of_week, day_of_month, month_of_year) + @staticmethod def _expand_cronspec( cronspec: int | str | Iterable, diff --git a/t/unit/app/test_schedules.py b/t/unit/app/test_schedules.py index b9285e64d93..63689831bdf 100644 --- a/t/unit/app/test_schedules.py +++ b/t/unit/app/test_schedules.py @@ -246,6 +246,22 @@ def test_eq(self): assert crontab(month_of_year='1') != schedule(10) +class test_crontab_from_string: + + def test_every_minute(self): + assert crontab.from_string('* * * * *') == crontab() + + def test_every_minute_on_sunday(self): + assert crontab.from_string('* * * * SUN') == crontab(day_of_week='SUN') + + def test_once_per_month(self): + assert crontab.from_string('0 8 5 * *') == crontab(minute=0, hour=8, day_of_month=5) + + def test_invalid_crontab_string(self): + with pytest.raises(ValueError): + crontab.from_string('*') + + class test_crontab_remaining_estimate: def crontab(self, *args, **kwargs): From 53e96fa6eebd23aee782c2e23aa91a525151f65e Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sun, 23 Jun 2024 16:55:31 +0300 Subject: [PATCH 0739/1051] Fixed smoke tests env bug when using integration tasks that rely on Redis (#9090) --- t/smoke/conftest.py | 24 ++++++++++++++++-------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/t/smoke/conftest.py b/t/smoke/conftest.py index c7f856fef3a..6c183a84dcd 100644 --- a/t/smoke/conftest.py +++ b/t/smoke/conftest.py @@ -2,7 +2,7 @@ import pytest from pytest_celery import REDIS_CONTAINER_TIMEOUT, REDIS_ENV, REDIS_IMAGE, REDIS_PORTS, RedisContainer -from pytest_docker_tools import container, fetch, network +from pytest_docker_tools import container, fetch from t.smoke.operations.task_termination import TaskTermination from t.smoke.operations.worker_kill import WorkerKill @@ -45,22 +45,30 @@ def default_worker_tasks(default_worker_tasks: set) -> set: # to be used by the integration tests tasks. 
redis_image = fetch(repository=REDIS_IMAGE) -redis_test_container_network = network(scope="session") redis_test_container: RedisContainer = container( image="{redis_image.id}", - scope="session", ports=REDIS_PORTS, environment=REDIS_ENV, - network="{redis_test_container_network.name}", + network="{default_pytest_celery_network.name}", wrapper_class=RedisContainer, timeout=REDIS_CONTAINER_TIMEOUT, ) -@pytest.fixture( - scope="session", - autouse=True, # Ensure the configuration is applied automatically -) +@pytest.fixture(autouse=True) def set_redis_test_container(redis_test_container: RedisContainer): """Configure the Redis test container to be used by the integration tests tasks.""" + # get_redis_connection(): will use these settings in the tests environment + os.environ["REDIS_HOST"] = "localhost" os.environ["REDIS_PORT"] = str(redis_test_container.port) + + +@pytest.fixture +def default_worker_env(default_worker_env: dict, redis_test_container: RedisContainer) -> dict: + """Add the Redis connection details to the worker environment.""" + # get_redis_connection(): will use these settings when executing tasks in the worker + default_worker_env.update({ + "REDIS_HOST": redis_test_container.hostname, + "REDIS_PORT": 6379, + }) + return default_worker_env From cd52e4db159888a45225b71bc56e5dbd275a13e4 Mon Sep 17 00:00:00 2001 From: DorSSS Date: Mon, 24 Jun 2024 02:17:45 +0300 Subject: [PATCH 0740/1051] Bugfix - a task will run multiple times when chaining chains with groups (#9021) * add the fix + tests * unchain only the other chain * fixes for pre-commit * Update celery/canvas.py Co-authored-by: Nils Caspar * Added smoke test --------- Co-authored-by: Dor.Shtainman Co-authored-by: Asif Saif Uddin Co-authored-by: Nils Caspar Co-authored-by: Tomer Nosrati --- celery/canvas.py | 4 +--- t/integration/test_canvas.py | 19 +++++++++++++++++++ t/smoke/tests/test_canvas.py | 21 ++++++++++++++++++++- t/unit/tasks/test_canvas.py | 10 ++++++++++ 4 files changed, 50 insertions(+), 4 deletions(-) diff --git a/celery/canvas.py b/celery/canvas.py index cf322f3b8a1..9f4d2f0ce74 100644 --- a/celery/canvas.py +++ b/celery/canvas.py @@ -974,9 +974,7 @@ def __or__(self, other): tasks, other), app=self._app) elif isinstance(other, _chain): # chain | chain -> chain - # use type(self) for _chain subclasses - return type(self)(seq_concat_seq( - self.unchain_tasks(), other.unchain_tasks()), app=self._app) + return reduce(operator.or_, other.unchain_tasks(), self) elif isinstance(other, Signature): if self.tasks and isinstance(self.tasks[-1], group): # CHAIN [last item is group] | TASK -> chord diff --git a/t/integration/test_canvas.py b/t/integration/test_canvas.py index bb5b80ffa67..d2474fa2351 100644 --- a/t/integration/test_canvas.py +++ b/t/integration/test_canvas.py @@ -1108,6 +1108,25 @@ def test_group_in_center_of_chain(self, manager): res = t3.apply_async() # should not raise assert res.get(timeout=TIMEOUT) == 60 + def test_upgrade_to_chord_inside_chains(self, manager): + if not manager.app.conf.result_backend.startswith("redis"): + raise pytest.skip("Requires redis result backend.") + try: + manager.app.backend.ensure_chords_allowed() + except NotImplementedError as e: + raise pytest.skip(e.args[0]) + + redis_key = str(uuid.uuid4()) + group1 = group(redis_echo.si('a', redis_key), redis_echo.si('a', redis_key)) + group2 = group(redis_echo.si('a', redis_key), redis_echo.si('a', redis_key)) + chord1 = group1 | group2 + chain1 = chain(chord1, (redis_echo.si('a', redis_key) | redis_echo.si('b', redis_key))) + 
chain1.apply_async().get(timeout=TIMEOUT) + redis_connection = get_redis_connection() + actual = redis_connection.lrange(redis_key, 0, -1) + assert actual.count(b'b') == 1 + redis_connection.delete(redis_key) + class test_result_set: diff --git a/t/smoke/tests/test_canvas.py b/t/smoke/tests/test_canvas.py index 7ecf838af90..6590315f024 100644 --- a/t/smoke/tests/test_canvas.py +++ b/t/smoke/tests/test_canvas.py @@ -1,8 +1,11 @@ +import uuid + import pytest from pytest_celery import RESULT_TIMEOUT, CeleryTestSetup from celery.canvas import chain, chord, group, signature -from t.integration.tasks import ExpectedException, add, fail, identity +from t.integration.conftest import get_redis_connection +from t.integration.tasks import ExpectedException, add, fail, identity, redis_echo class test_signature: @@ -52,6 +55,22 @@ def test_chain_gets_last_task_id_with_failing_tasks_in_chain(self, celery_setup: with pytest.raises(ExpectedException): res.get(timeout=RESULT_TIMEOUT) + def test_upgrade_to_chord_inside_chains(self, celery_setup: CeleryTestSetup): + redis_key = str(uuid.uuid4()) + queue = celery_setup.worker.worker_queue + group1 = group(redis_echo.si("a", redis_key), redis_echo.si("a", redis_key)) + group2 = group(redis_echo.si("a", redis_key), redis_echo.si("a", redis_key)) + chord1 = group1 | group2 + chain1 = chain( + chord1, (redis_echo.si("a", redis_key) | redis_echo.si("b", redis_key).set(queue=queue)) + ) + chain1.apply_async(queue=queue).get(timeout=RESULT_TIMEOUT) + redis_connection = get_redis_connection() + actual = redis_connection.lrange(redis_key, 0, -1) + assert actual.count(b"a") == 5 + assert actual.count(b"b") == 1 + redis_connection.delete(redis_key) + class test_chord: def test_sanity(self, celery_setup: CeleryTestSetup): diff --git a/t/unit/tasks/test_canvas.py b/t/unit/tasks/test_canvas.py index 9bd4f6b75dd..1f901376205 100644 --- a/t/unit/tasks/test_canvas.py +++ b/t/unit/tasks/test_canvas.py @@ -825,6 +825,16 @@ def test_group_in_center_of_chain(self): t2 = chord([self.add.si(1, 1), self.add.si(1, 1)], t1) t2.freeze() # should not raise + def test_upgrade_to_chord_on_chain(self): + group1 = group(self.add.si(10, 10), self.add.si(10, 10)) + group2 = group(self.xsum.s(), self.xsum.s()) + chord1 = group1 | group2 + chain1 = (self.xsum.si([5]) | self.add.s(1)) + final_task = chain(chord1, chain1) + assert len(final_task.tasks) == 1 and isinstance(final_task.tasks[0], chord) + assert isinstance(final_task.tasks[0].body, chord) + assert final_task.tasks[0].body.body == chain1 + class test_group(CanvasCase): def test_repr(self): From 418cf3321653b6cdd9c9524c832e5cd7523f8364 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 26 Jun 2024 03:23:19 +0300 Subject: [PATCH 0741/1051] Bump mypy from 1.10.0 to 1.10.1 (#9096) Bumps [mypy](https://github.com/python/mypy) from 1.10.0 to 1.10.1. - [Changelog](https://github.com/python/mypy/blob/master/CHANGELOG.md) - [Commits](https://github.com/python/mypy/compare/v1.10.0...v1.10.1) --- updated-dependencies: - dependency-name: mypy dependency-type: direct:production update-type: version-update:semver-patch ... 
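Returning to the canvas fix in PATCH 0740 above: the regression only appears when a chain whose first element upgrades to a chord is chained with another chain. A sketch of that shape, with illustrative task definitions (the real tests use ``add``/``xsum``/``redis_echo`` fixtures, and the broker/backend URLs here are assumptions):

.. code-block:: python

    from celery import Celery, chain, group

    app = Celery("example", broker="redis://", backend="redis://")

    @app.task
    def add(x, y):
        return x + y

    @app.task
    def xsum(nums):
        return sum(nums)

    # group | group upgrades the first group into a chord header.
    chord1 = group(add.si(10, 10), add.si(10, 10)) | group(xsum.s(), xsum.s())

    # Chaining another chain after it must not duplicate the chord body;
    # before the fix, some tasks in this workflow ran more than once.
    workflow = chain(chord1, chain(xsum.si([5]), add.s(1)))
    # workflow.apply_async() would execute it against a live broker.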
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index bb4464df96d..c6dcb43c76b 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -8,7 +8,7 @@ pytest-order==1.2.1 boto3>=1.26.143 moto>=4.1.11,<5.1.0 # typing extensions -mypy==1.10.0; platform_python_implementation=="CPython" +mypy==1.10.1; platform_python_implementation=="CPython" pre-commit>=3.5.0,<3.6.0; python_version < '3.9' pre-commit>=3.6.1; python_version >= '3.9' -r extras/yaml.txt From 50732166b645013626b28fc015ddd95895b6c5a3 Mon Sep 17 00:00:00 2001 From: Rimvydas Naktinis Date: Wed, 26 Jun 2024 17:37:57 +0200 Subject: [PATCH 0742/1051] Don't add a separator to global_keyprefix if it already has one (#9080) --- celery/backends/base.py | 8 +++++--- t/unit/backends/test_base.py | 20 +++++++++++++++++++- 2 files changed, 24 insertions(+), 4 deletions(-) diff --git a/celery/backends/base.py b/celery/backends/base.py index 22cdc2ebff6..3a29f1e9996 100644 --- a/celery/backends/base.py +++ b/celery/backends/base.py @@ -833,9 +833,11 @@ def _add_global_keyprefix(self): """ global_keyprefix = self.app.conf.get('result_backend_transport_options', {}).get("global_keyprefix", None) if global_keyprefix: - self.task_keyprefix = f"{global_keyprefix}_{self.task_keyprefix}" - self.group_keyprefix = f"{global_keyprefix}_{self.group_keyprefix}" - self.chord_keyprefix = f"{global_keyprefix}_{self.chord_keyprefix}" + if global_keyprefix[-1] not in ':_-.': + global_keyprefix += '_' + self.task_keyprefix = f"{global_keyprefix}{self.task_keyprefix}" + self.group_keyprefix = f"{global_keyprefix}{self.group_keyprefix}" + self.chord_keyprefix = f"{global_keyprefix}{self.chord_keyprefix}" def _encode_prefixes(self): self.task_keyprefix = self.key_t(self.task_keyprefix) diff --git a/t/unit/backends/test_base.py b/t/unit/backends/test_base.py index f2ede1503e2..0d4550732bf 100644 --- a/t/unit/backends/test_base.py +++ b/t/unit/backends/test_base.py @@ -760,7 +760,7 @@ def test_strip_prefix(self): assert self.b._strip_prefix('x1b34') == 'x1b34' def test_global_keyprefix(self): - global_keyprefix = "test_global_keyprefix_" + global_keyprefix = "test_global_keyprefix" app = copy.deepcopy(self.app) app.conf.get('result_backend_transport_options', {}).update({"global_keyprefix": global_keyprefix}) b = KVBackend(app=app) @@ -769,6 +769,24 @@ def test_global_keyprefix(self): assert bytes_to_str(b.get_key_for_group(tid)) == f"{global_keyprefix}_celery-taskset-meta-{tid}" assert bytes_to_str(b.get_key_for_chord(tid)) == f"{global_keyprefix}_chord-unlock-{tid}" + global_keyprefix = "test_global_keyprefix_" + app = copy.deepcopy(self.app) + app.conf.get('result_backend_transport_options', {}).update({"global_keyprefix": global_keyprefix}) + b = KVBackend(app=app) + tid = uuid() + assert bytes_to_str(b.get_key_for_task(tid)) == f"{global_keyprefix}celery-task-meta-{tid}" + assert bytes_to_str(b.get_key_for_group(tid)) == f"{global_keyprefix}celery-taskset-meta-{tid}" + assert bytes_to_str(b.get_key_for_chord(tid)) == f"{global_keyprefix}chord-unlock-{tid}" + + global_keyprefix = "test_global_keyprefix:" + app = copy.deepcopy(self.app) + app.conf.get('result_backend_transport_options', {}).update({"global_keyprefix": global_keyprefix}) + b = KVBackend(app=app) + tid = uuid() + assert bytes_to_str(b.get_key_for_task(tid)) == 
f"{global_keyprefix}celery-task-meta-{tid}" + assert bytes_to_str(b.get_key_for_group(tid)) == f"{global_keyprefix}celery-taskset-meta-{tid}" + assert bytes_to_str(b.get_key_for_chord(tid)) == f"{global_keyprefix}chord-unlock-{tid}" + def test_global_keyprefix_missing(self): tid = uuid() assert bytes_to_str(self.b.get_key_for_task(tid)) == f"celery-task-meta-{tid}" From 6c76726e489cd54920eb299722148568f0517f62 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 28 Jun 2024 12:28:16 +0300 Subject: [PATCH 0743/1051] Update pymongo[srv] requirement from <4.8,>=4.0.2 to >=4.0.2,<4.9 (#9111) Updates the requirements on [pymongo[srv]](https://github.com/mongodb/mongo-python-driver) to permit the latest version. - [Release notes](https://github.com/mongodb/mongo-python-driver/releases) - [Changelog](https://github.com/mongodb/mongo-python-driver/blob/master/doc/changelog.rst) - [Commits](https://github.com/mongodb/mongo-python-driver/compare/4.0.2...4.8.0) --- updated-dependencies: - dependency-name: pymongo[srv] dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/extras/mongodb.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/mongodb.txt b/requirements/extras/mongodb.txt index e7c9111e8c4..04d59283325 100644 --- a/requirements/extras/mongodb.txt +++ b/requirements/extras/mongodb.txt @@ -1 +1 @@ -pymongo[srv]>=4.0.2, <4.8 +pymongo[srv]>=4.0.2, <4.9 From 87f6893e4dab758c09e7eb16618129157753e734 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 1 Jul 2024 20:01:48 +0300 Subject: [PATCH 0744/1051] [pre-commit.ci] pre-commit autoupdate (#9114) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/mirrors-mypy: v1.10.0 → v1.10.1](https://github.com/pre-commit/mirrors-mypy/compare/v1.10.0...v1.10.1) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 8c8ffa517dd..0cb91803762 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -30,7 +30,7 @@ repos: - id: isort - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.10.0 + rev: v1.10.1 hooks: - id: mypy pass_filenames: false From c5f245e0f52680821fb4fc30d9f1c53f81a1543b Mon Sep 17 00:00:00 2001 From: Giovanni <63993401+giovanni1106@users.noreply.github.com> Date: Wed, 3 Jul 2024 07:49:39 -0300 Subject: [PATCH 0745/1051] Added missing import in examples for Django (#9099) * docs: add missing import * add name in authors * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * refactor: change config to using settings --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Asif Saif Uddin --- CONTRIBUTORS.txt | 1 + examples/django/proj/celery.py | 4 +++- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 184a2538e5a..9c3534b3358 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -298,3 +298,4 @@ Jeremy Z. 
Othieno 2023/07/27 Tomer Nosrati, 2022/17/07 Andy Zickler, 2024/01/18 Johannes Faigle, 2024/06/18 +Giovanni Giampauli, 2024/06/26 diff --git a/examples/django/proj/celery.py b/examples/django/proj/celery.py index ec3354dcdf3..182da54fb55 100644 --- a/examples/django/proj/celery.py +++ b/examples/django/proj/celery.py @@ -1,5 +1,7 @@ import os +from django.conf import settings + from celery import Celery # Set the default Django settings module for the 'celery' program. @@ -11,7 +13,7 @@ # the configuration object to child processes. # - namespace='CELERY' means all celery-related configuration keys # should have a `CELERY_` prefix. -app.config_from_object('django.conf:settings', namespace='CELERY') +app.config_from_object(f'django.conf:{settings.__name__}', namespace='CELERY') # Load task modules from all registered Django apps. app.autodiscover_tasks() From 0909d1d482994fe2ba83fd658b710acd90cc2339 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 3 Jul 2024 15:38:01 +0300 Subject: [PATCH 0746/1051] Bump Kombu to v5.4.0rc1 (#9117) --- requirements/default.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/default.txt b/requirements/default.txt index 02918bd1eff..e42cbec9b47 100644 --- a/requirements/default.txt +++ b/requirements/default.txt @@ -1,5 +1,5 @@ billiard>=4.2.0,<5.0 -kombu>=5.3.4,<6.0 +kombu>=5.4.0rc1,<6.0 vine>=5.1.0,<6.0 click>=8.1.2,<9.0 click-didyoumean>=0.3.0 From bd3b3c6afb8b0c44b416f469b3db6a725d71b609 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 3 Jul 2024 16:57:58 +0300 Subject: [PATCH 0747/1051] Removed skipping Redis in t/smoke/tests/test_consumer.py tests (#9118) --- t/smoke/tests/test_consumer.py | 11 +---------- 1 file changed, 1 insertion(+), 10 deletions(-) diff --git a/t/smoke/tests/test_consumer.py b/t/smoke/tests/test_consumer.py index c070b84c31a..042451f2980 100644 --- a/t/smoke/tests/test_consumer.py +++ b/t/smoke/tests/test_consumer.py @@ -1,5 +1,5 @@ import pytest -from pytest_celery import RESULT_TIMEOUT, CeleryTestSetup, RedisTestBroker +from pytest_celery import RESULT_TIMEOUT, CeleryTestSetup from celery import Celery from celery.canvas import chain, group @@ -51,9 +51,6 @@ def test_reducing_prefetch_count(self, celery_setup: CeleryTestSetup, expected_r celery_setup.worker.assert_log_exists(expected_prefetch_restore_message) def test_prefetch_count_restored(self, celery_setup: CeleryTestSetup): - if isinstance(celery_setup.broker, RedisTestBroker): - pytest.xfail("Potential Bug with Redis Broker") - expected_running_tasks_count = MAX_PREFETCH * WORKER_PREFETCH_MULTIPLIER sig = group(long_running_task.s(10) for _ in range(expected_running_tasks_count)) sig.apply_async(queue=celery_setup.worker.worker_queue) @@ -74,9 +71,6 @@ def default_worker_app(self, default_worker_app: Celery) -> Celery: return app def test_max_prefetch_passed_on_broker_restart(self, celery_setup: CeleryTestSetup): - if isinstance(celery_setup.broker, RedisTestBroker): - pytest.xfail("Real Bug: Broker does not fetch messages after restart") - sig = group(long_running_task.s(420) for _ in range(WORKER_CONCURRENCY)) sig.apply_async(queue=celery_setup.worker.worker_queue) celery_setup.broker.restart() @@ -95,9 +89,6 @@ def default_worker_app(self, default_worker_app: Celery) -> Celery: return app def test_max_prefetch_not_passed_on_broker_restart(self, celery_setup: CeleryTestSetup): - if isinstance(celery_setup.broker, RedisTestBroker): - pytest.xfail("Real Bug: Broker does not fetch messages after restart") - sig = 
group(long_running_task.s(10) for _ in range(WORKER_CONCURRENCY)) r = sig.apply_async(queue=celery_setup.worker.worker_queue) celery_setup.broker.restart() From 401c7715bedb3898aa07293f77cf59d40f2a3705 Mon Sep 17 00:00:00 2001 From: "pyup.io bot" Date: Sun, 7 Jul 2024 09:53:39 -0700 Subject: [PATCH 0748/1051] Update pytest-subtests from 0.12.1 to 0.13.0 (#9120) --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index c6dcb43c76b..3d468331b06 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,7 +1,7 @@ pytest==8.2.2 pytest-celery[all]>=1.0.0 pytest-rerunfailures==14.0 -pytest-subtests==0.12.1 +pytest-subtests==0.13.0 pytest-timeout==2.3.1 pytest-click==1.1.0 pytest-order==1.2.1 From 2859554a4707e53a1cdfe79dc073f5a8c4348f21 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 8 Jul 2024 02:22:06 +0300 Subject: [PATCH 0749/1051] Increased smoke tests CI timeout (#9122) --- .github/workflows/python-package.yml | 25 ++++++++----------------- 1 file changed, 8 insertions(+), 17 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index ad7bd024373..143180828fb 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -129,7 +129,6 @@ jobs: needs: - Integration if: needs.Integration.result == 'success' - timeout-minutes: 240 runs-on: ubuntu-latest strategy: fail-fast: false @@ -158,7 +157,7 @@ jobs: - name: > Run tox for "${{ matrix.python-version }}-smoke" - timeout-minutes: 10 + timeout-minutes: 20 run: > tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k failover @@ -167,7 +166,6 @@ jobs: needs: - Integration if: needs.Integration.result == 'success' - timeout-minutes: 240 runs-on: ubuntu-latest strategy: fail-fast: false @@ -196,7 +194,7 @@ jobs: - name: > Run tox for "${{ matrix.python-version }}-smoke" - timeout-minutes: 15 + timeout-minutes: 20 run: > tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k stamping @@ -205,7 +203,6 @@ jobs: needs: - Integration if: needs.Integration.result == 'success' - timeout-minutes: 240 runs-on: ubuntu-latest strategy: fail-fast: false @@ -234,7 +231,7 @@ jobs: - name: > Run tox for "${{ matrix.python-version }}-smoke" - timeout-minutes: 5 + timeout-minutes: 20 run: > tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k test_canvas.py @@ -243,7 +240,6 @@ jobs: needs: - Integration if: needs.Integration.result == 'success' - timeout-minutes: 240 runs-on: ubuntu-latest strategy: fail-fast: false @@ -272,7 +268,7 @@ jobs: - name: > Run tox for "${{ matrix.python-version }}-smoke" - timeout-minutes: 10 + timeout-minutes: 20 run: > tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k test_consumer.py @@ -281,7 +277,6 @@ jobs: needs: - Integration if: needs.Integration.result == 'success' - timeout-minutes: 240 runs-on: ubuntu-latest strategy: fail-fast: false @@ -310,7 +305,7 @@ jobs: - name: > Run tox for "${{ matrix.python-version }}-smoke" - timeout-minutes: 5 + timeout-minutes: 20 run: > tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k test_control.py @@ -319,7 +314,6 @@ jobs: needs: - Integration if: needs.Integration.result == 'success' - timeout-minutes: 240 runs-on: ubuntu-latest strategy: fail-fast: false @@ -348,7 +342,7 @@ jobs: - name: > Run tox for "${{ matrix.python-version }}-smoke" - timeout-minutes: 5 + timeout-minutes: 20 
run: > tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k test_signals.py @@ -357,7 +351,6 @@ jobs: needs: - Integration if: needs.Integration.result == 'success' - timeout-minutes: 240 runs-on: ubuntu-latest strategy: fail-fast: false @@ -386,7 +379,7 @@ jobs: - name: > Run tox for "${{ matrix.python-version }}-smoke" - timeout-minutes: 10 + timeout-minutes: 20 run: > tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k test_tasks.py @@ -395,7 +388,6 @@ jobs: needs: - Integration if: needs.Integration.result == 'success' - timeout-minutes: 240 runs-on: ubuntu-latest strategy: fail-fast: false @@ -424,7 +416,7 @@ jobs: - name: > Run tox for "${{ matrix.python-version }}-smoke" - timeout-minutes: 10 + timeout-minutes: 20 run: > tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k test_thread_safe.py @@ -433,7 +425,6 @@ jobs: needs: - Integration if: needs.Integration.result == 'success' - timeout-minutes: 240 runs-on: ubuntu-latest strategy: fail-fast: false From 5d32121131dcd7bd195d28730acb729ee9b30683 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 11 Jul 2024 21:55:23 +0300 Subject: [PATCH 0750/1051] Bump Kombu to v5.4.0rc2 (#9127) --- requirements/default.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/default.txt b/requirements/default.txt index e42cbec9b47..eb08a9be4ec 100644 --- a/requirements/default.txt +++ b/requirements/default.txt @@ -1,5 +1,5 @@ billiard>=4.2.0,<5.0 -kombu>=5.4.0rc1,<6.0 +kombu>=5.4.0rc2,<6.0 vine>=5.1.0,<6.0 click>=8.1.2,<9.0 click-didyoumean>=0.3.0 From b09634f21e809fbe686c58d6edc7b15fc2a9d937 Mon Sep 17 00:00:00 2001 From: pyup-bot Date: Mon, 15 Jul 2024 04:38:19 +0300 Subject: [PATCH 0751/1051] Update zstandard from 0.22.0 to 0.23.0 --- requirements/extras/zstd.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/zstd.txt b/requirements/extras/zstd.txt index 70ad0df0e95..ca872b12c41 100644 --- a/requirements/extras/zstd.txt +++ b/requirements/extras/zstd.txt @@ -1 +1 @@ -zstandard==0.22.0 +zstandard==0.23.0 From fa40468009f31ca0ed6c051872080a5a5e8367c1 Mon Sep 17 00:00:00 2001 From: "pyup.io bot" Date: Wed, 17 Jul 2024 02:45:30 -0700 Subject: [PATCH 0752/1051] Update pytest-subtests from 0.13.0 to 0.13.1 (#9130) --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index 3d468331b06..42679fe8dea 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,7 +1,7 @@ pytest==8.2.2 pytest-celery[all]>=1.0.0 pytest-rerunfailures==14.0 -pytest-subtests==0.13.0 +pytest-subtests==0.13.1 pytest-timeout==2.3.1 pytest-click==1.1.0 pytest-order==1.2.1 From bf88b74edb17a389c4d2fb796a2947ff4f6abed2 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Fri, 19 Jul 2024 00:55:53 +0300 Subject: [PATCH 0753/1051] Changed retry to tenacity (#9133) --- t/smoke/tests/test_tasks.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/t/smoke/tests/test_tasks.py b/t/smoke/tests/test_tasks.py index f4748296b8b..e55a4b41f30 100644 --- a/t/smoke/tests/test_tasks.py +++ b/t/smoke/tests/test_tasks.py @@ -2,7 +2,7 @@ import pytest from pytest_celery import RESULT_TIMEOUT, CeleryTestSetup, CeleryTestWorker, CeleryWorkerCluster -from retry import retry +from tenacity import retry, stop_after_attempt, wait_fixed from celery import Celery, signature from celery.exceptions import TimeLimitExceeded, WorkerLostError @@ -43,7 
+43,11 @@ def test_child_process_respawn(
         self.apply_self_termination_task(celery_setup.worker, method).get()
 
         # Allowing the worker to respawn the child process before we continue
-        @retry(tries=42, delay=0.1)  # 4.2 seconds
+        @retry(
+            stop=stop_after_attempt(42),
+            wait=wait_fixed(0.1),
+            reraise=True,
+        )
         def wait_for_two_celery_processes():
             pinfo_current = celery_setup.worker.get_running_processes_info(
                 ["pid", "name"],

From 7385b1bdff41d7db73287720a7324cf46d45d3ea Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Sun, 21 Jul 2024 14:52:38 +0300
Subject: [PATCH 0754/1051] Bump mypy from 1.10.1 to 1.11.0 (#9135)

Bumps [mypy](https://github.com/python/mypy) from 1.10.1 to 1.11.0.
- [Changelog](https://github.com/python/mypy/blob/master/CHANGELOG.md)
- [Commits](https://github.com/python/mypy/compare/v1.10.1...v1.11)

---
updated-dependencies:
- dependency-name: mypy
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 requirements/test.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements/test.txt b/requirements/test.txt
index 42679fe8dea..8e14d08d613 100644
--- a/requirements/test.txt
+++ b/requirements/test.txt
@@ -8,7 +8,7 @@ pytest-order==1.2.1
 boto3>=1.26.143
 moto>=4.1.11,<5.1.0
 # typing extensions
-mypy==1.10.1; platform_python_implementation=="CPython"
+mypy==1.11.0; platform_python_implementation=="CPython"
 pre-commit>=3.5.0,<3.6.0; python_version < '3.9'
 pre-commit>=3.6.1; python_version >= '3.9'
 -r extras/yaml.txt

From 237504c4c3f320155c333c0fb659e9a6e17153ab Mon Sep 17 00:00:00 2001
From: "pyup.io bot"
Date: Sun, 21 Jul 2024 04:53:54 -0700
Subject: [PATCH 0755/1051] Update cryptography from 42.0.8 to 43.0.0 (#9138)

---
 requirements/extras/auth.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements/extras/auth.txt b/requirements/extras/auth.txt
index a7ee686f2d5..ce12e287454 100644
--- a/requirements/extras/auth.txt
+++ b/requirements/extras/auth.txt
@@ -1 +1 @@
-cryptography==42.0.8
+cryptography==43.0.0

From 40408ab0d85b43d24f5efa71eaa3c707b5a325d4 Mon Sep 17 00:00:00 2001
From: "pyup.io bot"
Date: Sun, 21 Jul 2024 05:05:53 -0700
Subject: [PATCH 0756/1051] Update pytest from 8.2.2 to 8.3.1 (#9137)

---
 requirements/test.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements/test.txt b/requirements/test.txt
index 8e14d08d613..493fc6df658 100644
--- a/requirements/test.txt
+++ b/requirements/test.txt
@@ -1,4 +1,4 @@
-pytest==8.2.2
+pytest==8.3.1
 pytest-celery[all]>=1.0.0
 pytest-rerunfailures==14.0
 pytest-subtests==0.13.1

From 98e3d86386bc8165f2ba86d87c457382f936cd79 Mon Sep 17 00:00:00 2001
From: Tomer Nosrati
Date: Mon, 22 Jul 2024 00:49:58 +0300
Subject: [PATCH 0757/1051] Added support for Quorum Queues (#9121)

* Added new string config "task_default_queue_type" with default "classic"
* Added new bool config "worker_detect_quorum_queues" with default True
* Set default queue argument "x-queue-type" to "quorum" if task_default_queue_type is "quorum"
* Automatically disable global QoS if quorum queues are detected
* Added tests
* Added docs
* Added examples/quorum-queues
* Removed confirm_publish warning in favor of docs
* Added smoke tests
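
A minimal application opting into the new settings might look like this
(an illustrative sketch; the app name and broker URL are placeholders):

    from celery import Celery

    app = Celery("myapp", broker="pyamqp://guest@localhost//")

    # Make the default queue a RabbitMQ quorum queue.
    app.conf.task_default_queue_type = "quorum"

    # Quorum queues require publisher confirms.
    app.conf.broker_transport_options = {"confirm_publish": True}

    # Enabled by default: the worker detects quorum queues and
    # disables global QoS accordingly.
    app.conf.worker_detect_quorum_queues = True
---
 celery/app/amqp.py | 6 +-
 celery/app/defaults.py | 2 +
 celery/worker/consumer/tasks.py | 62 +++++++-
 docs/userguide/configuration.rst | 50 ++++++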
examples/quorum-queues/declare_queue.py | 15 ++ examples/quorum-queues/myapp.py | 149 ++++++++++++++++++ examples/quorum-queues/setup_cluster.sh | 117 ++++++++++++++ examples/quorum-queues/test_cluster.sh | 41 +++++ t/smoke/tests/quorum_queues/__init__.py | 0 t/smoke/tests/quorum_queues/conftest.py | 119 ++++++++++++++ .../tests/quorum_queues/test_quorum_queues.py | 36 +++++ t/unit/app/test_amqp.py | 9 +- t/unit/worker/test_consumer.py | 62 +++++++- 13 files changed, 658 insertions(+), 10 deletions(-) create mode 100755 examples/quorum-queues/declare_queue.py create mode 100644 examples/quorum-queues/myapp.py create mode 100755 examples/quorum-queues/setup_cluster.sh create mode 100755 examples/quorum-queues/test_cluster.sh create mode 100644 t/smoke/tests/quorum_queues/__init__.py create mode 100644 t/smoke/tests/quorum_queues/conftest.py create mode 100644 t/smoke/tests/quorum_queues/test_quorum_queues.py diff --git a/celery/app/amqp.py b/celery/app/amqp.py index e6aae3f8b3c..575117d13e1 100644 --- a/celery/app/amqp.py +++ b/celery/app/amqp.py @@ -249,9 +249,13 @@ def Queues(self, queues, create_missing=None, if max_priority is None: max_priority = conf.task_queue_max_priority if not queues and conf.task_default_queue: + queue_arguments = None + if conf.task_default_queue_type == 'quorum': + queue_arguments = {'x-queue-type': 'quorum'} queues = (Queue(conf.task_default_queue, exchange=self.default_exchange, - routing_key=default_routing_key),) + routing_key=default_routing_key, + queue_arguments=queue_arguments),) autoexchange = (self.autoexchange if autoexchange is None else autoexchange) return self.queues_cls( diff --git a/celery/app/defaults.py b/celery/app/defaults.py index 523b56d72f6..b9aaf66ef65 100644 --- a/celery/app/defaults.py +++ b/celery/app/defaults.py @@ -261,6 +261,7 @@ def __repr__(self): inherit_parent_priority=Option(False, type='bool'), default_delivery_mode=Option(2, type='string'), default_queue=Option('celery'), + default_queue_type=Option('classic', type='string'), default_exchange=Option(None, type='string'), # taken from queue default_exchange_type=Option('direct'), default_routing_key=Option(None, type='string'), # taken from queue @@ -345,6 +346,7 @@ def __repr__(self): task_log_format=Option(DEFAULT_TASK_LOG_FMT), timer=Option(type='string'), timer_precision=Option(1.0, type='float'), + detect_quorum_queues=Option(True, type='bool'), ), ) diff --git a/celery/worker/consumer/tasks.py b/celery/worker/consumer/tasks.py index b4e4aee99ec..12f9b6a33b5 100644 --- a/celery/worker/consumer/tasks.py +++ b/celery/worker/consumer/tasks.py @@ -1,7 +1,13 @@ """Worker Task Consumer Bootstep.""" + +from __future__ import annotations + +import warnings + from kombu.common import QoS, ignore_errors from celery import bootsteps +from celery.exceptions import CeleryWarning from celery.utils.log import get_logger from .mingle import Mingle @@ -12,6 +18,16 @@ debug = logger.debug +ETA_TASKS_NO_GLOBAL_QOS_WARNING = """ +Detected quorum queue "%r", disabling global QoS. +With global QoS disabled, ETA tasks may not function as expected. Instead of adjusting +the prefetch count dynamically, ETA tasks will occupy the prefetch buffer, potentially +blocking other tasks from being consumed. To mitigate this, either set a high prefetch +count or avoid using quorum queues until the ETA mechanism is updated to support a +disabled global QoS, which is required for quorum queues. 
+""" + + class Tasks(bootsteps.StartStopStep): """Bootstep starting the task message consumer.""" @@ -25,10 +41,7 @@ def start(self, c): """Start task consumer.""" c.update_strategies() - # - RabbitMQ 3.3 completely redefines how basic_qos works... - # This will detect if the new qos semantics is in effect, - # and if so make sure the 'apply_global' flag is set on qos updates. - qos_global = not c.connection.qos_semantics_matches_spec + qos_global = self.qos_global(c) # set initial prefetch count c.connection.default_channel.basic_qos( @@ -63,3 +76,44 @@ def shutdown(self, c): def info(self, c): """Return task consumer info.""" return {'prefetch_count': c.qos.value if c.qos else 'N/A'} + + def qos_global(self, c) -> bool: + """Determine if global QoS should be applied. + + Additional information: + https://www.rabbitmq.com/docs/consumer-prefetch + https://www.rabbitmq.com/docs/quorum-queues#global-qos + """ + # - RabbitMQ 3.3 completely redefines how basic_qos works... + # This will detect if the new qos semantics is in effect, + # and if so make sure the 'apply_global' flag is set on qos updates. + qos_global = not c.connection.qos_semantics_matches_spec + + if c.app.conf.worker_detect_quorum_queues: + using_quorum_queues, qname = self.detect_quorum_queues(c) + if using_quorum_queues: + qos_global = False + # The ETA tasks mechanism requires additional work for Celery to fully support + # quorum queues. Warn the user that ETA tasks may not function as expected until + # this is done so we can at least support quorum queues partially for now. + warnings.warn(ETA_TASKS_NO_GLOBAL_QOS_WARNING % (qname,), CeleryWarning) + + return qos_global + + def detect_quorum_queues(self, c) -> tuple[bool, str]: + """Detect if any of the queues are quorum queues. + + Returns: + tuple[bool, str]: A tuple containing a boolean indicating if any of the queues are quorum queues + and the name of the first quorum queue found or an empty string if no quorum queues were found. + """ + is_rabbitmq_broker = c.app.conf.broker_url.startswith(("amqp", "pyamqp")) + + if is_rabbitmq_broker: + queues = c.app.amqp.queues + for qname in queues: + qarguments = queues[qname].queue_arguments or {} + if qarguments.get("x-queue-type") == "quorum": + return True, qname + + return False, "" diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst index f5c3f280aa4..1250f4ff16e 100644 --- a/docs/userguide/configuration.rst +++ b/docs/userguide/configuration.rst @@ -137,6 +137,7 @@ have been moved into a new ``task_`` prefix. ``CELERY_DEFAULT_EXCHANGE`` :setting:`task_default_exchange` ``CELERY_DEFAULT_EXCHANGE_TYPE`` :setting:`task_default_exchange_type` ``CELERY_DEFAULT_QUEUE`` :setting:`task_default_queue` +``CELERY_DEFAULT_QUEUE_TYPE`` :setting:`task_default_queue_type` ``CELERY_DEFAULT_RATE_LIMIT`` :setting:`task_default_rate_limit` ``CELERY_DEFAULT_ROUTING_KEY`` :setting:`task_default_routing_key` ``CELERY_EAGER_PROPAGATES`` :setting:`task_eager_propagates` @@ -176,6 +177,7 @@ have been moved into a new ``task_`` prefix. ``CELERY_WORKER_TASK_LOG_FORMAT`` :setting:`worker_task_log_format` ``CELERYD_TIMER`` :setting:`worker_timer` ``CELERYD_TIMER_PRECISION`` :setting:`worker_timer_precision` +``CELERYD_DETECT_QUORUM_QUEUES`` :setting:`worker_detect_quorum_queues` ========================================== ============================================== Configuration Directives @@ -2606,6 +2608,42 @@ that queue. :ref:`routing-changing-default-queue` +.. 
setting:: task_default_queue_type + +``task_default_queue_type`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +.. versionadded:: 5.5 + +Default: ``"classic"``. + +This setting is used to allow changing the default queue type for the +:setting:`task_default_queue` queue. The other viable option is ``"quorum"`` which +is only supported by RabbitMQ and sets the queue type to ``quorum`` using the ``x-queue-type`` +queue argument. + +If the :setting:`worker_detect_quorum_queues` setting is enabled, the worker will +automatically detect the queue type and disable the global QoS accordingly. + +.. warning:: + + When using quorum queues, ETA tasks may not function as expected. Instead of adjusting + the prefetch count dynamically, ETA tasks will occupy the prefetch buffer, potentially + blocking other tasks from being consumed. To mitigate this, either set a high prefetch + count or avoid using quorum queues until the ETA mechanism is updated to support a + disabled global QoS, which is required for quorum queues. + +.. warning:: + + Quorum queues require confirm publish to be enabled. + Use :setting:`broker_transport_options` to enable confirm publish by setting: + + .. code-block:: python + + broker_transport_options = {"confirm_publish": True} + + For more information, see `RabbitMQ documentation `_. + .. setting:: task_default_exchange ``task_default_exchange`` @@ -3225,6 +3263,18 @@ are recorded as such in the result backend as long as :setting:`task_ignore_resu will be set to ``True`` by default as the current behavior leads to more problems than it solves. +.. setting:: worker_detect_quorum_queues + +``worker_detect_quorum_queues`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +.. versionadded:: 5.5 + +Default: Enabled. + +Automatically detect if any of the queues in :setting:`task_queues` are quorum queues +(including the :setting:`task_default_queue`) and disable the global QoS if any quorum queue is detected. + .. _conf-events: Events diff --git a/examples/quorum-queues/declare_queue.py b/examples/quorum-queues/declare_queue.py new file mode 100755 index 00000000000..4eaff0b88cb --- /dev/null +++ b/examples/quorum-queues/declare_queue.py @@ -0,0 +1,15 @@ +"""Create a quorum queue using Kombu.""" + +from kombu import Connection, Exchange, Queue + +my_quorum_queue = Queue( + "my-quorum-queue", + Exchange("default"), + routing_key="default", + queue_arguments={"x-queue-type": "quorum"}, +) + +with Connection("amqp://guest@localhost//") as conn: + channel = conn.channel() + my_quorum_queue.maybe_bind(conn) + my_quorum_queue.declare() diff --git a/examples/quorum-queues/myapp.py b/examples/quorum-queues/myapp.py new file mode 100644 index 00000000000..41698f3ce0f --- /dev/null +++ b/examples/quorum-queues/myapp.py @@ -0,0 +1,149 @@ +"""myapp.py + +Usage:: + + (window1)$ python myapp.py worker -l INFO + + (window2)$ celery shell + >>> from myapp import example + >>> example() + + +You can also specify the app to use with the `celery` command, +using the `-A` / `--app` option:: + + $ celery -A myapp worker -l INFO + +With the `-A myproj` argument the program will search for an app +instance in the module ``myproj``. 
You can also specify an explicit +name using the fully qualified form:: + + $ celery -A myapp:app worker -l INFO + +""" + +import os +from datetime import UTC, datetime, timedelta + +from declare_queue import my_quorum_queue + +from celery import Celery +from celery.canvas import group + +app = Celery("myapp", broker="amqp://guest@localhost//") + +# Use custom queue (Optional) or set the default queue type to "quorum" +# app.conf.task_queues = (my_quorum_queue,) # uncomment to use custom queue +app.conf.task_default_queue_type = "quorum" # comment to use classic queue + +# Required by Quorum Queues: https://www.rabbitmq.com/docs/quorum-queues#use-cases +app.conf.broker_transport_options = {"confirm_publish": True} + +# Reduce qos to 4 (Optional, useful for testing) +app.conf.worker_prefetch_multiplier = 1 +app.conf.worker_concurrency = 4 + +# Reduce logs (Optional, useful for testing) +app.conf.worker_heartbeat = None +app.conf.broker_heartbeat = 0 + + +def is_using_quorum_queues(app) -> bool: + queues = app.amqp.queues + for qname in queues: + qarguments = queues[qname].queue_arguments or {} + if qarguments.get("x-queue-type") == "quorum": + return True + + return False + + +@app.task +def add(x, y): + return x + y + + +@app.task +def identity(x): + return x + + +def example(): + queue = my_quorum_queue.name if my_quorum_queue in (app.conf.task_queues or {}) else "celery" + + while True: + print("Celery Quorum Queue Example") + print("===========================") + print("1. Send a simple identity task") + print("1.1 Send an ETA identity task") + print("2. Send a group of add tasks") + print("3. Inspect the active queues") + print("4. Shutdown Celery worker") + print("Q. Quit") + print("Q! Exit") + choice = input("Enter your choice (1-4 or Q): ") + + if choice == "1" or choice == "1.1": + queue_type = "Quorum" if is_using_quorum_queues(app) else "Classic" + payload = f"Hello, {queue_type} Queue!" + eta = datetime.now(UTC) + timedelta(seconds=30) + if choice == "1.1": + result = identity.si(payload).apply_async(queue=queue, eta=eta) + else: + result = identity.si(payload).apply_async(queue=queue) + print() + print(f"Task sent with ID: {result.id}") + print("Task type: identity") + + if choice == "1.1": + print(f"ETA: {eta}") + + print(f"Payload: {payload}") + + elif choice == "2": + tasks = [ + (1, 2), + (3, 4), + (5, 6), + ] + result = group( + add.s(*tasks[0]), + add.s(*tasks[1]), + add.s(*tasks[2]), + ).apply_async(queue=queue) + print() + print("Group of tasks sent.") + print(f"Group result ID: {result.id}") + for i, task_args in enumerate(tasks, 1): + print(f"Task {i} type: add") + print(f"Payload: {task_args}") + + elif choice == "3": + active_queues = app.control.inspect().active_queues() + print() + print("Active queues:") + for worker, queues in active_queues.items(): + print(f"Worker: {worker}") + for q in queues: + print(f" - {q['name']}") + + elif choice == "4": + print("Shutting down Celery worker...") + app.control.shutdown() + + elif choice.lower() == "q": + print("Quitting test()") + break + + elif choice.lower() == "q!": + print("Exiting...") + os.abort() + + else: + print("Invalid choice. 
Please enter a number between 1 and 4 or Q to quit.") + + print("\n" + "#" * 80 + "\n") + + +if __name__ == "__main__": + app.start() diff --git a/examples/quorum-queues/setup_cluster.sh b/examples/quorum-queues/setup_cluster.sh new file mode 100755 index 00000000000..f59501e9277 --- /dev/null +++ b/examples/quorum-queues/setup_cluster.sh @@ -0,0 +1,117 @@ +#!/bin/bash + +ERLANG_COOKIE="MYSECRETCOOKIE" + +cleanup() { + echo "Stopping and removing existing RabbitMQ containers..." + docker stop rabbit1 rabbit2 rabbit3 2>/dev/null + docker rm rabbit1 rabbit2 rabbit3 2>/dev/null + + echo "Removing existing Docker network..." + docker network rm rabbitmq-cluster 2>/dev/null +} + +wait_for_container() { + local container_name=$1 + local retries=20 + local count=0 + + until [ "$(docker inspect -f {{.State.Running}} $container_name)" == "true" ]; do + sleep 1 + count=$((count + 1)) + if [ $count -ge $retries ]; then + echo "Error: Container $container_name did not start in time." + exit 1 + fi + done +} + +wait_for_rabbitmq() { + local container_name=$1 + local retries=10 + local count=0 + + until docker exec -it $container_name rabbitmqctl status; do + sleep 1 + count=$((count + 1)) + if [ $count -ge $retries ]; then + echo "Error: RabbitMQ in container $container_name did not start in time." + exit 1 + fi + done +} + +setup_cluster() { + echo "Creating Docker network for RabbitMQ cluster..." + docker network create rabbitmq-cluster + + echo "Starting rabbit1 container..." + docker run -d --rm --name rabbit1 --hostname rabbit1 --net rabbitmq-cluster \ + -e RABBITMQ_NODENAME=rabbit@rabbit1 \ + -e RABBITMQ_ERLANG_COOKIE=$ERLANG_COOKIE \ + --net-alias rabbit1 \ + -p 15672:15672 -p 5672:5672 rabbitmq:3-management + + sleep 5 + wait_for_container rabbit1 + wait_for_rabbitmq rabbit1 + + # echo "Installing netcat in rabbit1 for debugging purposes..." + # docker exec -it rabbit1 bash -c "apt-get update && apt-get install -y netcat" + + echo "Starting rabbit2 container..." + docker run -d --rm --name rabbit2 --hostname rabbit2 --net rabbitmq-cluster \ + -e RABBITMQ_NODENAME=rabbit@rabbit2 \ + -e RABBITMQ_ERLANG_COOKIE=$ERLANG_COOKIE \ + --net-alias rabbit2 \ + -p 15673:15672 -p 5673:5672 rabbitmq:3-management + + sleep 5 + wait_for_container rabbit2 + wait_for_rabbitmq rabbit2 + + # echo "Installing netcat in rabbit2 for debugging purposes..." + # docker exec -it rabbit2 bash -c "apt-get update && apt-get install -y netcat" + + echo "Starting rabbit3 container..." + docker run -d --rm --name rabbit3 --hostname rabbit3 --net rabbitmq-cluster \ + -e RABBITMQ_NODENAME=rabbit@rabbit3 \ + -e RABBITMQ_ERLANG_COOKIE=$ERLANG_COOKIE \ + --net-alias rabbit3 \ + -p 15674:15672 -p 5674:5672 rabbitmq:3-management + + sleep 5 + wait_for_container rabbit3 + wait_for_rabbitmq rabbit3 + + # echo "Installing netcat in rabbit3 for debugging purposes..." + # docker exec -it rabbit3 bash -c "apt-get update && apt-get install -y netcat" + + echo "Joining rabbit2 to the cluster..." + docker exec -it rabbit2 rabbitmqctl stop_app + docker exec -it rabbit2 rabbitmqctl reset + docker exec -it rabbit2 rabbitmqctl join_cluster rabbit@rabbit1 + if [ $? -ne 0 ]; then + echo "Error: Failed to join rabbit2 to the cluster." + exit 1 + fi + docker exec -it rabbit2 rabbitmqctl start_app + + echo "Joining rabbit3 to the cluster..." + docker exec -it rabbit3 rabbitmqctl stop_app + docker exec -it rabbit3 rabbitmqctl reset + docker exec -it rabbit3 rabbitmqctl join_cluster rabbit@rabbit1 + if [ $? 
-ne 0 ]; then + echo "Error: Failed to join rabbit3 to the cluster." + exit 1 + fi + docker exec -it rabbit3 rabbitmqctl start_app + + echo "Verifying cluster status from rabbit1..." + docker exec -it rabbit1 rabbitmqctl cluster_status +} + +cleanup +setup_cluster + +echo "RabbitMQ cluster setup is complete." diff --git a/examples/quorum-queues/test_cluster.sh b/examples/quorum-queues/test_cluster.sh new file mode 100755 index 00000000000..c0b36bce521 --- /dev/null +++ b/examples/quorum-queues/test_cluster.sh @@ -0,0 +1,41 @@ +#!/bin/bash + +QUEUE_NAME="my-quorum-queue" +VHOST="/" + +remove_existing_queue() { + docker exec -it rabbit1 rabbitmqctl delete_queue $QUEUE_NAME +} + +create_quorum_queue() { + docker exec -it rabbit1 rabbitmqadmin declare queue name=$QUEUE_NAME durable=true arguments='{"x-queue-type":"quorum"}' +} + +verify_quorum_queue() { + docker exec -it rabbit1 rabbitmqctl list_queues name type durable auto_delete arguments | grep $QUEUE_NAME +} + +send_test_message() { + docker exec -it rabbit1 rabbitmqadmin publish exchange=amq.default routing_key=$QUEUE_NAME payload='Hello, RabbitMQ!' +} + +receive_test_message() { + docker exec -it rabbit1 rabbitmqadmin get queue=$QUEUE_NAME ackmode=ack_requeue_false +} + +echo "Removing existing quorum queue if it exists..." +remove_existing_queue + +echo "Creating quorum queue..." +create_quorum_queue + +echo "Verifying quorum queue..." +verify_quorum_queue + +echo "Sending test message..." +send_test_message + +echo "Receiving test message..." +receive_test_message + +echo "Quorum queue setup and message test completed successfully." diff --git a/t/smoke/tests/quorum_queues/__init__.py b/t/smoke/tests/quorum_queues/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/t/smoke/tests/quorum_queues/conftest.py b/t/smoke/tests/quorum_queues/conftest.py new file mode 100644 index 00000000000..9111a97dd5a --- /dev/null +++ b/t/smoke/tests/quorum_queues/conftest.py @@ -0,0 +1,119 @@ +from __future__ import annotations + +import os + +import pytest +from pytest_celery import RABBITMQ_PORTS, CeleryBrokerCluster, RabbitMQContainer, RabbitMQTestBroker, defaults +from pytest_docker_tools import build, container, fxtr + +from celery import Celery +from t.smoke.workers.dev import SmokeWorkerContainer + +############################################################################### +# RabbitMQ Management Broker +############################################################################### + + +class RabbitMQManagementBroker(RabbitMQTestBroker): + def get_management_url(self) -> str: + """Opening this link during debugging allows you to see the + RabbitMQ management UI in your browser. + + Usage from a test: + >>> celery_setup.broker.get_management_url() + + Open from a browser and login with guest:guest. 
+ """ + ports = self.container.attrs["NetworkSettings"]["Ports"] + ip = ports["15672/tcp"][0]["HostIp"] + port = ports["15672/tcp"][0]["HostPort"] + return f"http://{ip}:{port}" + + +@pytest.fixture +def default_rabbitmq_broker_image() -> str: + return "rabbitmq:management" + + +@pytest.fixture +def default_rabbitmq_broker_ports() -> dict: + # Expose the management UI port + ports = RABBITMQ_PORTS.copy() + ports.update({"15672/tcp": None}) + return ports + + +@pytest.fixture +def celery_rabbitmq_broker(default_rabbitmq_broker: RabbitMQContainer) -> RabbitMQTestBroker: + broker = RabbitMQManagementBroker(default_rabbitmq_broker) + yield broker + broker.teardown() + + +@pytest.fixture +def celery_broker_cluster(celery_rabbitmq_broker: RabbitMQTestBroker) -> CeleryBrokerCluster: + cluster = CeleryBrokerCluster(celery_rabbitmq_broker) + yield cluster + cluster.teardown() + + +############################################################################### +# Worker Configuration +############################################################################### + + +class QuorumWorkerContainer(SmokeWorkerContainer): + @classmethod + def log_level(cls) -> str: + return "INFO" + + @classmethod + def worker_queue(cls) -> str: + return "celery" + + +@pytest.fixture +def default_worker_container_cls() -> type[SmokeWorkerContainer]: + return QuorumWorkerContainer + + +@pytest.fixture(scope="session") +def default_worker_container_session_cls() -> type[SmokeWorkerContainer]: + return QuorumWorkerContainer + + +celery_dev_worker_image = build( + path=".", + dockerfile="t/smoke/workers/docker/dev", + tag="t/smoke/worker:dev", + buildargs=QuorumWorkerContainer.buildargs(), +) + + +default_worker_container = container( + image="{celery_dev_worker_image.id}", + ports=fxtr("default_worker_ports"), + environment=fxtr("default_worker_env"), + network="{default_pytest_celery_network.name}", + volumes={ + # Volume: Worker /app + "{default_worker_volume.name}": defaults.DEFAULT_WORKER_VOLUME, + # Mount: Celery source + os.path.abspath(os.getcwd()): { + "bind": "/celery", + "mode": "rw", + }, + }, + wrapper_class=QuorumWorkerContainer, + timeout=defaults.DEFAULT_WORKER_CONTAINER_TIMEOUT, + command=fxtr("default_worker_command"), +) + + +@pytest.fixture +def default_worker_app(default_worker_app: Celery) -> Celery: + app = default_worker_app + app.conf.broker_transport_options = {"confirm_publish": True} + app.conf.task_default_queue_type = "quorum" + + return app diff --git a/t/smoke/tests/quorum_queues/test_quorum_queues.py b/t/smoke/tests/quorum_queues/test_quorum_queues.py new file mode 100644 index 00000000000..7748dce982d --- /dev/null +++ b/t/smoke/tests/quorum_queues/test_quorum_queues.py @@ -0,0 +1,36 @@ +import requests +from pytest_celery import RESULT_TIMEOUT, CeleryTestSetup +from requests.auth import HTTPBasicAuth + +from celery.canvas import group +from t.integration.tasks import add, identity +from t.smoke.tests.quorum_queues.conftest import RabbitMQManagementBroker + + +class test_broker_configuration: + def test_queue_type(self, celery_setup: CeleryTestSetup): + broker: RabbitMQManagementBroker = celery_setup.broker + api = broker.get_management_url() + "/api/queues" + response = requests.get(api, auth=HTTPBasicAuth("guest", "guest")) + assert response.status_code == 200 + res = response.json() + assert isinstance(res, list) + worker_queue = next((queue for queue in res if queue["name"] == celery_setup.worker.worker_queue), None) + assert worker_queue is not None, 
f'"{celery_setup.worker.worker_queue}" queue not found' + queue_type = worker_queue.get("type") + assert queue_type == "quorum", f'"{celery_setup.worker.worker_queue}" queue is not a quorum queue' + + +class test_quorum_queues: + def test_signature(self, celery_setup: CeleryTestSetup): + sig = identity.si("test_signature").set(queue=celery_setup.worker.worker_queue) + assert sig.delay().get(timeout=RESULT_TIMEOUT) == "test_signature" + + def test_group(self, celery_setup: CeleryTestSetup): + sig = group( + group(add.si(1, 1), add.si(2, 2)), + group([add.si(1, 1), add.si(2, 2)]), + group(s for s in [add.si(1, 1), add.si(2, 2)]), + ) + res = sig.apply_async(queue=celery_setup.worker.worker_queue) + assert res.get(timeout=RESULT_TIMEOUT) == [2, 4, 2, 4, 2, 4] diff --git a/t/unit/app/test_amqp.py b/t/unit/app/test_amqp.py index acbeecea08a..1293eb5d15e 100644 --- a/t/unit/app/test_amqp.py +++ b/t/unit/app/test_amqp.py @@ -137,17 +137,19 @@ def test_with_max_priority(self, queues_kwargs, qname, q, expected): class test_default_queues: + @pytest.mark.parametrize('default_queue_type', ['classic', 'quorum']) @pytest.mark.parametrize('name,exchange,rkey', [ ('default', None, None), ('default', 'exchange', None), ('default', 'exchange', 'routing_key'), ('default', None, 'routing_key'), ]) - def test_setting_default_queue(self, name, exchange, rkey): + def test_setting_default_queue(self, name, exchange, rkey, default_queue_type): self.app.conf.task_queues = {} self.app.conf.task_default_exchange = exchange self.app.conf.task_default_routing_key = rkey self.app.conf.task_default_queue = name + self.app.conf.task_default_queue_type = default_queue_type assert self.app.amqp.queues.default_exchange.name == exchange or name queues = dict(self.app.amqp.queues) assert len(queues) == 1 @@ -156,6 +158,11 @@ def test_setting_default_queue(self, name, exchange, rkey): assert queue.exchange.type == 'direct' assert queue.routing_key == rkey or name + if default_queue_type == 'quorum': + assert queue.queue_arguments == {'x-queue-type': 'quorum'} + else: + assert queue.queue_arguments is None + class test_default_exchange: diff --git a/t/unit/worker/test_consumer.py b/t/unit/worker/test_consumer.py index 6613bd2a40e..3b8cb2a8322 100644 --- a/t/unit/worker/test_consumer.py +++ b/t/unit/worker/test_consumer.py @@ -9,14 +9,14 @@ from celery import bootsteps from celery.contrib.testing.mocks import ContextMock -from celery.exceptions import WorkerShutdown, WorkerTerminate +from celery.exceptions import CeleryWarning, WorkerShutdown, WorkerTerminate from celery.utils.collections import LimitedSet from celery.worker.consumer.agent import Agent from celery.worker.consumer.consumer import CANCEL_TASKS_BY_DEFAULT, CLOSE, TERMINATE, Consumer from celery.worker.consumer.gossip import Gossip from celery.worker.consumer.heart import Heart from celery.worker.consumer.mingle import Mingle -from celery.worker.consumer.tasks import Tasks +from celery.worker.consumer.tasks import ETA_TASKS_NO_GLOBAL_QOS_WARNING, Tasks from celery.worker.state import active_requests @@ -543,8 +543,13 @@ def test_start_heartbeat_interval(self): class test_Tasks: + def setup_method(self): + self.c = Mock() + self.c.app.conf.worker_detect_quorum_queues = True + self.c.connection.qos_semantics_matches_spec = False + def test_stop(self): - c = Mock() + c = self.c tasks = Tasks(c) assert c.task_consumer is None assert c.qos is None @@ -553,10 +558,59 @@ def test_stop(self): tasks.stop(c) def test_stop_already_stopped(self): - c = Mock() + c = self.c tasks 
= Tasks(c) tasks.stop(c) + def test_detect_quorum_queues_positive(self): + c = self.c + c.app.amqp.queues = {"celery": Mock(queue_arguments={"x-queue-type": "quorum"})} + tasks = Tasks(c) + result, name = tasks.detect_quorum_queues(c) + assert result + assert name == "celery" + + def test_detect_quorum_queues_negative(self): + c = self.c + c.app.amqp.queues = {"celery": Mock(queue_arguments=None)} + tasks = Tasks(c) + result, name = tasks.detect_quorum_queues(c) + assert not result + assert name == "" + + def test_detect_quorum_queues_not_rabbitmq(self): + c = self.c + c.app.conf.broker_url = "redis://" + tasks = Tasks(c) + result, name = tasks.detect_quorum_queues(c) + assert not result + assert name == "" + + def test_qos_global_worker_detect_quorum_queues_false(self): + c = self.c + c.app.conf.worker_detect_quorum_queues = False + tasks = Tasks(c) + assert tasks.qos_global(c) is True + + def test_qos_global_worker_detect_quorum_queues_true_no_quorum_queues(self): + c = self.c + c.app.amqp.queues = {"celery": Mock(queue_arguments=None)} + tasks = Tasks(c) + assert tasks.qos_global(c) is True + + def test_qos_global_worker_detect_quorum_queues_true_with_quorum_queues(self): + c = self.c + c.app.amqp.queues = {"celery": Mock(queue_arguments={"x-queue-type": "quorum"})} + tasks = Tasks(c) + assert tasks.qos_global(c) is False + + def test_qos_global_eta_warning(self): + c = self.c + c.app.amqp.queues = {"celery": Mock(queue_arguments={"x-queue-type": "quorum"})} + tasks = Tasks(c) + with pytest.warns(CeleryWarning, match=ETA_TASKS_NO_GLOBAL_QOS_WARNING % "celery"): + tasks.qos_global(c) + class test_Agent: From 4755342dd8522035359cff8ad01e4ec3d8ac0e51 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 22 Jul 2024 01:44:23 +0300 Subject: [PATCH 0758/1051] Bump Kombu to v5.4.0rc3 (#9139) --- requirements/default.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/default.txt b/requirements/default.txt index eb08a9be4ec..182e57a4422 100644 --- a/requirements/default.txt +++ b/requirements/default.txt @@ -1,5 +1,5 @@ billiard>=4.2.0,<5.0 -kombu>=5.4.0rc2,<6.0 +kombu>=5.4.0rc3,<6.0 vine>=5.1.0,<6.0 click>=8.1.2,<9.0 click-didyoumean>=0.3.0 From 6f49a7bf10b150a4edb8e85db17e4c3e8a5f06b1 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 22 Jul 2024 18:32:54 +0300 Subject: [PATCH 0759/1051] Cleanup in Changelog.rst (#9141) --- Changelog.rst | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/Changelog.rst b/Changelog.rst index a410e35ecb9..985be8f1a17 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -90,6 +90,8 @@ Changes since 5.4.0rc2 - Bump pytest-order from 1.2.0 to 1.2.1 (#8941) - Added documentation to the smoke tests infra (#8970) - Added a checklist item for using pytest-celery in a bug report (#8971) +- Added changelog for v5.4.0 (#8973) +- Bump version: 5.4.0rc2 → 5.4.0 (#8974) .. _version-5.4.0rc2: @@ -217,7 +219,6 @@ The code changes are mostly fix for regressions. More details can be found below - Update elasticsearch version (#8656) - Propagates more ImportErrors during autodiscovery (#8632) - .. _version-5.3.5: 5.3.5 @@ -341,8 +342,6 @@ The code changes are mostly fix for regressions. More details can be found below - Revert "Add Semgrep to CI" (#8477) - Revert "Revert "Add Semgrep to CI"" (#8478) -.. _CELERY: - .. 
_version-5.3.3:
 
 5.3.3 (Yanked)
 ==============

From 9c5a687c2b70d6e097227e65068477729bdc8e03 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]"
 <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Mon, 22 Jul 2024 20:39:28 +0300
Subject: [PATCH 0760/1051] [pre-commit.ci] pre-commit autoupdate (#9142)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

updates:
- [github.com/pre-commit/mirrors-mypy: v1.10.1 → v1.11.0](https://github.com/pre-commit/mirrors-mypy/compare/v1.10.1...v1.11.0)

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
---
 .pre-commit-config.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 0cb91803762..add6cd19744 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -30,7 +30,7 @@ repos:
       - id: isort
 
   - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: v1.10.1
+    rev: v1.11.0
    hooks:
      - id: mypy
        pass_filenames: false

From ec05f2195587711cd3aa6ccc021e5af6c17713db Mon Sep 17 00:00:00 2001
From: Tyler Smith
Date: Tue, 23 Jul 2024 06:25:49 -0700
Subject: [PATCH 0761/1051] Update first-steps-with-django.rst (#9143)

Update Django cache settings
---
 docs/django/first-steps-with-django.rst | 11 ++++-------
 1 file changed, 4 insertions(+), 7 deletions(-)

diff --git a/docs/django/first-steps-with-django.rst b/docs/django/first-steps-with-django.rst
index f069334caac..28654a633a0 100644
--- a/docs/django/first-steps-with-django.rst
+++ b/docs/django/first-steps-with-django.rst
@@ -260,17 +260,14 @@ To use this with your project you need to follow these steps:
 
         CELERY_RESULT_BACKEND = 'django-db'
 
-    For the cache backend you can use:
+    When using the cache backend, you can specify a cache defined within
+    Django's CACHES setting.
 
     .. code-block:: python
 
-        CELERY_CACHE_BACKEND = 'django-cache'
+        CELERY_RESULT_BACKEND = 'django-cache'
 
-    We can also use the cache defined in the CACHES setting in django.
-
-    .. code-block:: python
-
-        # celery setting.
+        # pick which cache from the CACHES setting.
         CELERY_CACHE_BACKEND = 'default'
 
         # django setting.
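
Here ``CELERY_CACHE_BACKEND = 'default'`` names an entry in Django's
``CACHES`` setting, so the two settings pair up roughly like this (an
illustrative sketch; the Redis cache backend and location are
placeholders, any entry in CACHES works):

    # Django settings.py
    CACHES = {
        'default': {
            'BACKEND': 'django.core.cache.backends.redis.RedisCache',
            'LOCATION': 'redis://127.0.0.1:6379',
        },
    }

    # Celery settings: store task results in the cache named 'default'.
    CELERY_RESULT_BACKEND = 'django-cache'
    CELERY_CACHE_BACKEND = 'default'
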
From 2b001a17927fe6fbaeb8f7185bc8aa1a63f62748 Mon Sep 17 00:00:00 2001
From: Tomer Nosrati
Date: Tue, 23 Jul 2024 16:52:41 +0300
Subject: [PATCH 0762/1051] Added missing docs to previous releases (#9144)

---
 docs/history/changelog-5.3.rst | 529 +++++++++++++++++++++++++++++++++
 docs/history/changelog-5.4.rst | 200 +++++++++++++
 docs/history/index.rst | 2 +
 3 files changed, 731 insertions(+)
 create mode 100644 docs/history/changelog-5.3.rst
 create mode 100644 docs/history/changelog-5.4.rst

diff --git a/docs/history/changelog-5.3.rst b/docs/history/changelog-5.3.rst
new file mode 100644
index 00000000000..7b5802a8359
--- /dev/null
+++ b/docs/history/changelog-5.3.rst
@@ -0,0 +1,529 @@
+.. _changelog-5.3:
+
+================
+ Change history
+================
+
+This document contains change notes for bugfixes & new features
+in the 5.3.x series; please see :ref:`whatsnew-5.3` for
+an overview of what's new in Celery 5.3.
+
+.. _version-5.3.6:
+
+5.3.6
+=====
+
+:release-date: 2023-11-22 9:15 P.M GMT+6
+:release-by: Asif Saif Uddin
+
+This release is focused mainly on fixing the AWS SQS new-feature compatibility issue and old regressions.
+The code changes are mostly fixes for regressions. More details can be found below.
+
+- Increased docker-build CI job timeout from 30m -> 60m (#8635)
+- Incredibly minor spelling fix. (#8649)
+- Fix non-zero exit code when receiving remote shutdown (#8650)
+- Update task.py get_custom_headers missing 'compression' key (#8633)
+- Update kombu>=5.3.4 to fix SQS request compatibility with boto JSON serializer (#8646)
+- test requirements version update (#8655)
+- Update elasticsearch version (#8656)
+- Propagates more ImportErrors during autodiscovery (#8632)
+
+.. _version-5.3.5:
+
+5.3.5
+=====
+
+:release-date: 2023-11-10 7:15 P.M GMT+6
+:release-by: Asif Saif Uddin
+
+- Update test.txt versions (#8481)
+- fix os.getcwd() FileNotFoundError (#8448)
+- Fix typo in CONTRIBUTING.rst (#8494)
+- typo(doc): configuration.rst (#8484)
+- assert before raise (#8495)
+- Update GHA checkout version (#8496)
+- Fixed replaced_task_nesting (#8500)
+- Fix code indentation for route_task() example (#8502)
+- support redis 5.x (#8504)
+- Fix typos in test_canvas.py (#8498)
+- Marked flaky tests (#8508)
+- Fix typos in calling.rst (#8506)
+- Added support for replaced_task_nesting in chains (#8501)
+- Fix typos in canvas.rst (#8509)
+- Patch Version Release Checklist (#8488)
+- Added Python 3.11 support to Dockerfile (#8511)
+- Dependabot (Celery) (#8510)
+- Bump actions/checkout from 3 to 4 (#8512)
+- Update ETA example to include timezone (#8516)
+- Replaces datetime.fromisoformat with the more lenient dateutil parser (#8507)
+- Fixed indentation in Dockerfile for Python 3.11 (#8527)
+- Fix git bug in Dockerfile (#8528)
+- Tox lint upgrade from Python 3.9 to Python 3.11 (#8526)
+- Document gevent concurrency (#8520)
+- Update test.txt (#8530)
+- Celery Docker Upgrades (#8531)
+- pyupgrade upgrade v3.11.0 -> v3.13.0 (#8535)
+- Update msgpack.txt (#8548)
+- Update auth.txt (#8547)
+- Update msgpack.txt to fix build issues (#8552)
+- Basic ElasticSearch / ElasticClient 8.x Support (#8519)
+- Fix eager tasks does not populate name field (#8486)
+- Fix typo in celery.app.control (#8563)
+- Update solar.txt ephem (#8566)
+- Update test.txt pytest-timeout (#8565)
+- Correct some mypy errors (#8570)
+- Update elasticsearch.txt (#8573)
+- Update test.txt deps (#8574)
+- Update test.txt (#8590)
+- Improved the "Next steps" documentation (#8561). (#8600)
+- Disabled couchbase tests due to broken package breaking main (#8602)
+- Update elasticsearch deps (#8605)
+- Update cryptography==41.0.5 (#8604)
+- Update pytest==7.4.3 (#8606)
+- test initial support of python 3.12.x (#8549)
+- updated new versions to fix CI (#8607)
+- Update zstd.txt (#8609)
+- Fixed CI Support with Python 3.12 (#8611)
+- updated CI, docs and classifier for next release (#8613)
+- updated dockerfile to add python 3.12 (#8614)
+- lint,mypy,docker-unit-tests -> Python 3.12 (#8617)
+- Correct type of `request` in `task_revoked` documentation (#8616)
+- update docs docker image (#8618)
+- Fixed RecursionError caused by giving `config_from_object` nested mod… (#8619)
+- Fix: serialization error when gossip working (#6566)
+- [documentation] broker_connection_max_retries of 0 does not mean "retry forever" (#8626)
+- added 2 debian package for better stability in Docker (#8629)
+
+.. _version-5.3.4:
+
+5.3.4
+=====
+
+:release-date: 2023-09-03 10:10 P.M GMT+2
+:release-by: Tomer Nosrati
+
+.. warning::
+    This version has reverted the breaking changes introduced in 5.3.2 and 5.3.3:
+
+    - Revert "store children with database backend" (#8475)
+    - Revert "Fix eager tasks does not populate name field" (#8476)
+
+- Bugfix: Removed unnecessary stamping code from _chord.run() (#8339)
+- User guide fix (hotfix for #1755) (#8342)
+- store children with database backend (#8338)
+- Stamping bugfix with group/chord header errback linking (#8347)
+- Use argsrepr and kwargsrepr in LOG_RECEIVED (#8301)
+- Fixing minor typo in code example in calling.rst (#8366)
+- add documents for timeout settings (#8373)
+- fix: copyright year (#8380)
+- setup.py: enable include_package_data (#8379)
+- Fix eager tasks does not populate name field (#8383)
+- Update test.txt dependencies (#8389)
+- Update auth.txt deps (#8392)
+- Fix backend.get_task_meta ignores the result_extended config parameter in mongodb backend (#8391)
+- Support preload options for shell and purge commands (#8374)
+- Implement safer ArangoDB queries (#8351)
+- integration test: cleanup worker after test case (#8361)
+- Added "Tomer Nosrati" to CONTRIBUTORS.txt (#8400)
+- Update README.rst (#8404)
+- Update README.rst (#8408)
+- fix(canvas): add group index when unrolling tasks (#8427)
+- fix(beat): debug statement should only log AsyncResult.id if it exists (#8428)
+- Lint fixes & pre-commit autoupdate (#8414)
+- Update auth.txt (#8435)
+- Update mypy on test.txt (#8438)
+- added missing kwargs arguments in some cli cmd (#8049)
+- Fix #8431: Set format_date to False when calling _get_result_meta on mongo backend (#8432)
+- Docs: rewrite out-of-date code (#8441)
+- Limit redis client to 4.x since 5.x fails the test suite (#8442)
+- Limit tox to < 4.9 (#8443)
+- Fixed issue: Flags broker_connection_retry_on_startup & broker_connection_retry aren’t reliable (#8446)
+- doc update from #7651 (#8451)
+- Remove tox version limit (#8464)
+- Fixed AttributeError: 'str' object has no attribute (#8463)
+- Upgraded Kombu from 5.3.1 -> 5.3.2 (#8468)
+- Document need for CELERY_ prefix on CLI env vars (#8469)
+- Use string value for CELERY_SKIP_CHECKS envvar (#8462)
+- Revert "store children with database backend" (#8475)
+- Revert "Fix eager tasks does not populate name field" (#8476)
+- Update Changelog (#8474)
+- Remove as it seems to be buggy. (#8340)
+- Revert "Add Semgrep to CI" (#8477)
+- Revert "Revert "Add Semgrep to CI"" (#8478)
+
+.. _version-5.3.3:
+
+5.3.3 (Yanked)
+==============
+
+:release-date: 2023-08-31 1:47 P.M GMT+2
+:release-by: Tomer Nosrati
+
+.. warning::
+    This version has been yanked due to breaking API changes. The breaking changes include:
+
+    - Store children with database backend (#8338)
+    - Fix eager tasks does not populate name field (#8383)
+
+- Fixed changelog for 5.3.2 release docs.
+
+.. _version-5.3.2:
+
+5.3.2 (Yanked)
+==============
+
+:release-date: 2023-08-31 1:30 P.M GMT+2
+:release-by: Tomer Nosrati
+
+.. warning::
+    This version has been yanked due to breaking API changes. The breaking changes include:
+
+    - Store children with database backend (#8338)
+    - Fix eager tasks does not populate name field (#8383)
+
+- Bugfix: Removed unnecessary stamping code from _chord.run() (#8339)
+- User guide fix (hotfix for #1755) (#8342)
+- Store children with database backend (#8338)
+- Stamping bugfix with group/chord header errback linking (#8347)
+- Use argsrepr and kwargsrepr in LOG_RECEIVED (#8301)
+- Fixing minor typo in code example in calling.rst (#8366)
+- Add documents for timeout settings (#8373)
+- Fix: copyright year (#8380)
+- Setup.py: enable include_package_data (#8379)
+- Fix eager tasks does not populate name field (#8383)
+- Update test.txt dependencies (#8389)
+- Update auth.txt deps (#8392)
+- Fix backend.get_task_meta ignores the result_extended config parameter in mongodb backend (#8391)
+- Support preload options for shell and purge commands (#8374)
+- Implement safer ArangoDB queries (#8351)
+- Integration test: cleanup worker after test case (#8361)
+- Added "Tomer Nosrati" to CONTRIBUTORS.txt (#8400)
+- Update README.rst (#8404)
+- Update README.rst (#8408)
+- Fix(canvas): add group index when unrolling tasks (#8427)
+- Fix(beat): debug statement should only log AsyncResult.id if it exists (#8428)
+- Lint fixes & pre-commit autoupdate (#8414)
+- Update auth.txt (#8435)
+- Update mypy on test.txt (#8438)
+- Added missing kwargs arguments in some cli cmd (#8049)
+- Fix #8431: Set format_date to False when calling _get_result_meta on mongo backend (#8432)
+- Docs: rewrite out-of-date code (#8441)
+- Limit redis client to 4.x since 5.x fails the test suite (#8442)
+- Limit tox to < 4.9 (#8443)
+- Fixed issue: Flags broker_connection_retry_on_startup & broker_connection_retry aren’t reliable (#8446)
+- Doc update from #7651 (#8451)
+- Remove tox version limit (#8464)
+- Fixed AttributeError: 'str' object has no attribute (#8463)
+- Upgraded Kombu from 5.3.1 -> 5.3.2 (#8468)
+
+.. _version-5.3.1:
+
+5.3.1
+=====
+
+:release-date: 2023-06-18 8:15 P.M GMT+6
+:release-by: Asif Saif Uddin
+
+- Upgrade to latest pycurl release (#7069).
+- Limit librabbitmq>=2.0.0; python_version < '3.11' (#8302).
+- Added initial support for python 3.11 (#8304).
+- ChainMap observers fix (#8305).
+- Revert optimization CLI flag behaviour back to original.
+- Restrict redis 4.5.5 as it has severe bugs (#8317).
+- Tested pypy 3.10 version in CI (#8320).
+- Bump new version of kombu to 5.3.1 (#8323).
+- Fixed a small float value of retry_backoff (#8295).
+- Limit pyro4 up to python 3.10 only as it is (#8324).
+
+.. _version-5.3.0:
+
+5.3.0
+=====
+
+:release-date: 2023-06-06 12:00 P.M GMT+6
+:release-by: Asif Saif Uddin
+
+- Test kombu 5.3.0 & minor doc update (#8294).
+- Update librabbitmq.txt > 2.0.0 (#8292).
+- Upgrade syntax to py3.8 (#8281).
+
+.. _version-5.3.0rc2:
+
+5.3.0rc2
+========
+
+:release-date: 2023-05-31 9:00 P.M GMT+6
+:release-by: Asif Saif Uddin
+
+- Add missing dependency.
+- Fix exc_type being the exception instance rather.
+- Fixed revoking tasks by stamped headers (#8269).
+- Support sqlalchemy 2.0 in tests (#8271).
+- Fix docker (#8275).
+- Update redis.txt to 4.5 (#8278).
+- Update kombu>=5.3.0rc2.
+
+
+.. _version-5.3.0rc1:
+
+5.3.0rc1
+========
+
+:release-date: 2023-05-11 4:24 P.M GMT+2
+:release-by: Tomer Nosrati
+
+- fix function name by @cuishuang in #8087
+- Update CELERY_TASK_EAGER setting in user guide by @thebalaa in #8085
+- Stamping documentation fixes & cleanups by @Nusnus in #8092
+- switch to maintained pyro5 by @auvipy in #8093
+- update dependencies of tests by @auvipy in #8095
+- cryptography==39.0.1 by @auvipy in #8096
+- Annotate celery/security/certificate.py by @Kludex in #7398
+- Deprecate parse_iso8601 in favor of fromisoformat by @stumpylog in #8098
+- pytest==7.2.2 by @auvipy in #8106
+- Type annotations for celery/utils/text.py by @max-muoto in #8107
+- Update web framework URLs by @sblondon in #8112
+- Fix contribution URL by @sblondon in #8111
+- Trying to clarify CERT_REQUIRED by @pamelafox in #8113
+- Fix potential AttributeError on 'stamps' by @Darkheir in #8115
+- Type annotations for celery/apps/beat.py by @max-muoto in #8108
+- Fixed bug where retrying a task loses its stamps by @Nusnus in #8120
+- Type hints for celery/schedules.py by @max-muoto in #8114
+- Reference Gopher Celery in README by @marselester in #8131
+- Update sqlalchemy.txt by @auvipy in #8136
+- azure-storage-blob 12.15.0 by @auvipy in #8137
+- test kombu 5.3.0b3 by @auvipy in #8138
+- fix: add expire string parse. by @Bidaya0 in #8134
+- Fix worker crash on un-pickleable exceptions by @youtux in #8133
+- CLI help output: avoid text rewrapping by click by @woutdenolf in #8152
+- Warn when an unnamed periodic task override another one. by @iurisilvio in #8143
+- Fix Task.handle_ignore not wrapping exceptions properly by @youtux in #8149
+- Hotfix for (#8120) - Stamping bug with retry by @Nusnus in #8158
+- Fix integration test by @youtux in #8156
+- Fixed bug in revoke_by_stamped_headers where impl did not match doc by @Nusnus in #8162
+- Align revoke and revoke_by_stamped_headers return values (terminate=True) by @Nusnus in #8163
+- Update & simplify GHA pip caching by @stumpylog in #8164
+- Update auth.txt by @auvipy in #8167
+- Update test.txt versions by @auvipy in #8173
+- remove extra = from test.txt by @auvipy in #8179
+- Update sqs.txt kombu[sqs]>=5.3.0b3 by @auvipy in #8174
+- Added signal triggered before fork by @jaroslawporada in #8177
+- Update documentation on SQLAlchemy by @max-muoto in #8188
+- Deprecate pytz and use zoneinfo by @max-muoto in #8159
+- Update dev.txt by @auvipy in #8192
+- Update test.txt by @auvipy in #8193
+- Update test-integration.txt by @auvipy in #8194
+- Update zstd.txt by @auvipy in #8195
+- Update s3.txt by @auvipy in #8196
+- Update msgpack.txt by @auvipy in #8199
+- Update solar.txt by @auvipy in #8198
+- Add Semgrep to CI by @Nusnus in #8201
+- Added semgrep to README.rst by @Nusnus in #8202
+- Update django.txt by @auvipy in #8197
+- Update redis.txt 4.3.6 by @auvipy in #8161
+- start removing codecov from pypi by @auvipy in #8206
+- Update test.txt dependencies by @auvipy in #8205
+- Improved doc for: worker_deduplicate_successful_tasks by @Nusnus in #8209
+- Renamed revoked_headers to revoked_stamps by @Nusnus in #8210
+- Ensure argument for map is JSON serializable by @candleindark in #8229
+
+.. 
_version-5.3.0b2: + +5.3.0b2 +======= + +:release-date: 2023-02-19 1:47 P.M GMT+2 +:release-by: Asif Saif Uddin + +- BLM-2: Adding unit tests to chord clone by @Nusnus in #7668 +- Fix unknown task error typo by @dcecile in #7675 +- rename redis integration test class so that tests are executed by @wochinge in #7684 +- Check certificate/private key type when loading them by @qrmt in #7680 +- Added integration test_chord_header_id_duplicated_on_rabbitmq_msg_duplication() by @Nusnus in #7692 +- New feature flag: allow_error_cb_on_chord_header - allowing setting an error callback on chord header by @Nusnus in #7712 +- Update README.rst sorting Python/Celery versions by @andrebr in #7714 +- Fixed a bug where stamping a chord body would not use the correct stamping method by @Nusnus in #7722 +- Fixed doc duplication typo for Signature.stamp() by @Nusnus in #7725 +- Fix issue 7726: variable used in finally block may not be instantiated by @woutdenolf in #7727 +- Fixed bug in chord stamping with another chord as a body + unit test by @Nusnus in #7730 +- Use "describe_table" not "create_table" to check for existence of DynamoDB table by @maxfirman in #7734 +- Enhancements for task_allow_error_cb_on_chord_header tests and docs by @Nusnus in #7744 +- Improved custom stamping visitor documentation by @Nusnus in #7745 +- Improved the coverage of test_chord_stamping_body_chord() by @Nusnus in #7748 +- billiard >= 3.6.3.0,<5.0 for rpm by @auvipy in #7764 +- Fixed memory leak with ETA tasks at connection error when worker_cancel_long_running_tasks_on_connection_loss is enabled by @Nusnus in #7771 +- Fixed bug where a chord with header of type tuple was not supported in the link_error flow for task_allow_error_cb_on_chord_header flag by @Nusnus in #7772 +- Scheduled weekly dependency update for week 38 by @pyup-bot in #7767 +- recreate_module: set spec to the new module by @skshetry in #7773 +- Override integration test config using integration-tests-config.json by @thedrow in #7778 +- Fixed error handling bugs due to upgrade to a newer version of billiard by @Nusnus in #7781 +- Do not recommend using easy_install anymore by @jugmac00 in #7789 +- GitHub Workflows security hardening by @sashashura in #7768 +- Update ambiguous acks_late doc by @Zhong-z in #7728 +- billiard >=4.0.2,<5.0 by @auvipy in #7720 +- importlib_metadata remove deprecated entry point interfaces by @woutdenolf in #7785 +- Scheduled weekly dependency update for week 41 by @pyup-bot in #7798 +- pyzmq>=22.3.0 by @auvipy in #7497 +- Remove amqp from the BACKEND_ALISES list by @Kludex in #7805 +- Replace print by logger.debug by @Kludex in #7809 +- Ignore coverage on except ImportError by @Kludex in #7812 +- Add mongodb dependencies to test.txt by @Kludex in #7810 +- Fix grammar typos on the whole project by @Kludex in #7815 +- Remove isatty wrapper function by @Kludex in #7814 +- Remove unused variable _range by @Kludex in #7813 +- Add type annotation on concurrency/threads.py by @Kludex in #7808 +- Fix linter workflow by @Kludex in #7816 +- Scheduled weekly dependency update for week 42 by @pyup-bot in #7821 +- Remove .cookiecutterrc by @Kludex in #7830 +- Remove .coveragerc file by @Kludex in #7826 +- kombu>=5.3.0b2 by @auvipy in #7834 +- Fix readthedocs build failure by @woutdenolf in #7835 +- Fixed bug in group, chord, chain stamp() method, where the visitor overrides the previously stamps in tasks of these objects by @Nusnus in #7825 +- Stabilized test_mutable_errback_called_by_chord_from_group_fail_multiple by @Nusnus in #7837 +- Use 
SPDX license expression in project metadata by @RazerM in #7845 +- New control command revoke_by_stamped_headers by @Nusnus in #7838 +- Clarify wording in Redis priority docs by @strugee in #7853 +- Fix non working example of using celery_worker pytest fixture by @paradox-lab in #7857 +- Removed the mandatory requirement to include stamped_headers key when implementing on_signature() by @Nusnus in #7856 +- Update serializer docs by @sondrelg in #7858 +- Remove reference to old Python version by @Kludex in #7829 +- Added on_replace() to Task to allow manipulating the replaced sig with custom changes at the end of the task.replace() by @Nusnus in #7860 +- Add clarifying information to completed_count documentation by @hankehly in #7873 +- Stabilized test_revoked_by_headers_complex_canvas by @Nusnus in #7877 +- StampingVisitor will visit the callbacks and errbacks of the signature by @Nusnus in #7867 +- Fix "rm: no operand" error in clean-pyc script by @hankehly in #7878 +- Add --skip-checks flag to bypass django core checks by @mudetz in #7859 +- Scheduled weekly dependency update for week 44 by @pyup-bot in #7868 +- Added two new unit tests to callback stamping by @Nusnus in #7882 +- Sphinx extension: use inspect.signature to make it Python 3.11 compatible by @mathiasertl in #7879 +- cryptography==38.0.3 by @auvipy in #7886 +- Canvas.py doc enhancement by @Nusnus in #7889 +- Fix typo by @sondrelg in #7890 +- fix typos in optional tests by @hsk17 in #7876 +- Canvas.py doc enhancement by @Nusnus in #7891 +- Fix revoke by headers tests stability by @Nusnus in #7892 +- feat: add global keyprefix for backend result keys by @kaustavb12 in #7620 +- Canvas.py doc enhancement by @Nusnus in #7897 +- fix(sec): upgrade sqlalchemy to 1.2.18 by @chncaption in #7899 +- Canvas.py doc enhancement by @Nusnus in #7902 +- Fix test warnings by @ShaheedHaque in #7906 +- Support for out-of-tree worker pool implementations by @ShaheedHaque in #7880 +- Canvas.py doc enhancement by @Nusnus in #7907 +- Use bound task in base task example. 
Closes #7909 by @WilliamDEdwards in #7910 +- Allow the stamping visitor itself to set the stamp value type instead of casting it to a list by @Nusnus in #7914 +- Stamping a task left the task properties dirty by @Nusnus in #7916 +- Fixed bug when chaining a chord with a group by @Nusnus in #7919 +- Fixed bug in the stamping visitor mechanism where the request was lacking the stamps in the 'stamps' property by @Nusnus in #7928 +- Fixed bug in task_accepted() where the request was not added to the requests but only to the active_requests by @Nusnus in #7929 +- Fix bug in TraceInfo._log_error() where the real exception obj was hiding behind 'ExceptionWithTraceback' by @Nusnus in #7930 +- Added integration test: test_all_tasks_of_canvas_are_stamped() by @Nusnus in #7931 +- Added new example for the stamping mechanism: examples/stamping by @Nusnus in #7933 +- Fixed a bug where replacing a stamped task and stamping it again by @Nusnus in #7934 +- Bugfix for nested group stamping on task replace by @Nusnus in #7935 +- Added integration test test_stamping_example_canvas() by @Nusnus in #7937 +- Fixed a bug in losing chain links when unchaining an inner chain with links by @Nusnus in #7938 +- Removing as not mandatory by @auvipy in #7885 +- Housekeeping for Canvas.py by @Nusnus in #7942 +- Scheduled weekly dependency update for week 50 by @pyup-bot in #7954 +- try pypy 3.9 in CI by @auvipy in #7956 +- sqlalchemy==1.4.45 by @auvipy in #7943 +- billiard>=4.1.0,<5.0 by @auvipy in #7957 +- feat(typecheck): allow changing type check behavior on the app level; by @moaddib666 in #7952 +- Add broker_channel_error_retry option by @nkns165 in #7951 +- Add beat_cron_starting_deadline_seconds to prevent unwanted cron runs by @abs25 in #7945 +- Scheduled weekly dependency update for week 51 by @pyup-bot in #7965 +- Added doc to "retry_errors" newly supported field of "publish_retry_policy" of the task namespace by @Nusnus in #7967 +- Renamed from master to main in the docs and the CI workflows by @Nusnus in #7968 +- Fix docs for the exchange to use with worker_direct by @alessio-b2c2 in #7973 +- Pin redis==4.3.4 by @auvipy in #7974 +- return list of nodes to make sphinx extension compatible with Sphinx 6.0 by @mathiasertl in #7978 +- use version range redis>=4.2.2,<4.4.0 by @auvipy in #7980 +- Scheduled weekly dependency update for week 01 by @pyup-bot in #7987 +- Add annotations to minimise differences with celery-aio-pool's tracer.py. by @ShaheedHaque in #7925 +- Fixed bug where linking a stamped task did not add the stamp to the link's options by @Nusnus in #7992 +- sqlalchemy==1.4.46 by @auvipy in #7995 +- pytz by @auvipy in #8002 +- Fix few typos, provide configuration + workflow for codespell to catch any new by @yarikoptic in #8023 +- RabbitMQ links update by @arnisjuraga in #8031 +- Ignore files generated by tests by @Kludex in #7846 +- Revert "sqlalchemy==1.4.46 (#7995)" by @Nusnus in #8033 +- Fixed bug with replacing a stamped task with a chain or a group (inc. 
links/errlinks) by @Nusnus in #8034 +- Fixed formatting in setup.cfg that caused flake8 to misbehave by @Nusnus in #8044 +- Removed duplicated import Iterable by @Nusnus in #8046 +- Fix docs by @Nusnus in #8047 +- Document --logfile default by @strugee in #8057 +- Stamping Mechanism Refactoring by @Nusnus in #8045 +- result_backend_thread_safe config shares backend across threads by @CharlieTruong in #8058 +- Fix cronjob that uses day of month and negative UTC timezone by @pkyosx in #8053 +- Stamping Mechanism Examples Refactoring by @Nusnus in #8060 +- Fixed bug in Task.on_stamp_replaced() by @Nusnus in #8061 +- Stamping Mechanism Refactoring 2 by @Nusnus in #8064 +- Changed default append_stamps from True to False (meaning duplicates … by @Nusnus in #8068 +- typo in comment: mailicious => malicious by @yanick in #8072 +- Fix command for starting flower with specified broker URL by @ShukantPal in #8071 +- Improve documentation on ETA/countdown tasks (#8069) by @norbertcyran in #8075 + +.. _version-5.3.0b1: + +5.3.0b1 +======= + +:release-date: 2022-08-01 5:15 P.M UTC+6:00 +:release-by: Asif Saif Uddin + +- Canvas Header Stamping (#7384). +- async chords should pass its kwargs to the group/body. +- beat: Suppress banner output with the quiet option (#7608). +- Fix honoring Django's TIME_ZONE setting. +- Don't warn about DEBUG=True for Django. +- Fixed a deadlock that prevented on_after_finalize from accessing tasks. +- Bump kombu>=5.3.0b1,<6.0. +- Make default worker state limits configurable (#7609). +- Only clear the cache if there are no active writers. +- Billiard 4.0.1 + +.. _version-5.3.0a1: + +5.3.0a1 +======= + +:release-date: 2022-06-29 5:15 P.M UTC+6:00 +:release-by: Asif Saif Uddin + +- Remove Python 3.4 compatibility code. +- Call ping to set the connection attr, avoiding a redis parse_response error. +- Use importlib instead of deprecated pkg_resources. +- Fix #7245: uid duplicated in command params. +- Fix subscribed_to maybe empty (#7232). +- Fix: Celery beat sometimes sleeps 300 seconds even when it should run a task within a few seconds (e.g. 13 seconds) #7290. +- Add security_key_password option (#7292). +- Limit elasticsearch support to below version 8.0. +- Try the new major release of pytest 7 (#7330). +- broker_connection_retry should no longer apply on startup (#7300); see the sketch after this list. +- Remove __ne__ methods (#7257). +- Fix #7200: uid and gid. +- Remove exception-throwing from the signal handler. +- Add mypy to the pipeline (#7383). +- Expose more debugging information when receiving unknown tasks. (#7405) +- Avoid importing buf_t from billiard's compat module as it was removed. +- Avoid negating a constant in a loop. (#7443) +- Ensure expiration is of float type when migrating tasks (#7385). +- load_extension_class_names - correct module_name (#7406) +- Bump pymongo[srv]>=4.0.2. +- Use inspect.getgeneratorstate in asynpool.gen_not_started (#7476). +- Fix test with missing .get() (#7479). +- azure-storage-blob>=12.11.0 +- Make start_worker, setup_default_app reusable outside of pytest. +- Ensure a proper error message is raised when the id for a key is empty (#7447). +- Crontab string representation does not match UNIX crontab expression. +- Worker should exit with ctx.exit to get the right exitcode for non-zero. +- Fix expiration check (#7552). +- Use callable built-in. +- Include dont_autoretry_for option in tasks. (#7556) +- Fix: syntax error in Arango query. +- Fix custom headers propagation on task retries (#7555). +- Silence backend warning when eager results are stored. +- Reduce prefetch count on restart and gradually restore it (#7350). +- Improve workflow primitive subclassing (#7593). +- test kombu>=5.3.0a1,<6.0 (#7598). +- Canvas Header Stamping (#7384).
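Regarding the startup-retry item above (#7300): retrying the *first* broker connection when a worker boots was split out of ``broker_connection_retry`` into a dedicated setting. A minimal configuration sketch; the app/broker names are placeholders and the values shown are illustrative rather than documented defaults:

.. code-block:: python

    from celery import Celery

    app = Celery('proj', broker='amqp://guest@localhost//')

    # Governs retrying the *initial* connection at worker startup
    # (split out from broker_connection_retry by #7300).
    app.conf.broker_connection_retry_on_startup = True

    # Still governs retries after an established connection drops.
    app.conf.broker_connection_retry = True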
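The Django task class from #8491 (referenced in the list above) is what provides the ``delay_on_commit()`` / ``apply_async_on_commit()`` helpers, which enqueue a task only once the surrounding database transaction commits. A rough sketch; the view and helper below are hypothetical, and the app is assumed to be configured with ``celery.contrib.django.task:DjangoTask`` as its task class:

.. code-block:: python

    from django.db import transaction

    from proj.celery import app  # hypothetical app configured with DjangoTask


    @app.task
    def send_welcome_email(user_id: int) -> None:
        ...  # body omitted


    def signup(request):  # hypothetical Django view
        with transaction.atomic():
            user = create_user_from(request)  # hypothetical helper
            # Only enqueued if the transaction commits; dropped on rollback.
            send_welcome_email.delay_on_commit(user.id)

Unlike ``delay()``, the ``_on_commit`` variants return ``None`` rather than an ``AsyncResult``, since no task has been sent at call time (clarified in #8984).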
+ +Dependencies Updates +-------------------- +- Bump actions/setup-python from 4 to 5 (#8701) +- Bump codecov/codecov-action from 3 to 4 (#8831) +- Bump isort from 5.12.0 to 5.13.2 (#8772) +- Bump msgpack from 1.0.7 to 1.0.8 (#8885) +- Bump mypy from 1.8.0 to 1.9.0 (#8898) +- Bump pre-commit to 3.6.1 (#8839) +- Bump pre-commit/action from 3.0.0 to 3.0.1 (#8835) +- Bump pytest from 8.0.2 to 8.1.1 (#8901) +- Bump pytest-celery to v1.0.0 (#8962) +- Bump pytest-cov to 5.0.0 (#8924) +- Bump pytest-order from 1.2.0 to 1.2.1 (#8941) +- Bump pytest-subtests from 0.11.0 to 0.12.1 (#8896) +- Bump pytest-timeout from 2.2.0 to 2.3.1 (#8894) +- Bump python-memcached from 1.59 to 1.61 (#8776) +- Bump sphinx-click from 4.4.0 to 5.1.0 (#8774) +- Update cryptography to 42.0.5 (#8869) +- Update elastic-transport requirement from <=8.12.0 to <=8.13.0 (#8933) +- Update elasticsearch requirement from <=8.12.1 to <=8.13.0 (#8934) +- Upgraded Sphinx from v5.3.0 to v7.x.x (#8803) + +Changes since 5.4.0rc2 +---------------------- +- Update elastic-transport requirement from <=8.12.0 to <=8.13.0 (#8933) +- Update elasticsearch requirement from <=8.12.1 to <=8.13.0 (#8934) +- Hotfix: Smoke tests didn't allow customizing the worker's command arguments, now it does (#8937) +- Bump pytest-celery to 1.0.0rc3 (#8946) +- Update optimizing.rst (#8945) +- Doc: Enhance "Testing with Celery" section (#8955) +- Bump pytest-celery to v1.0.0 (#8962) +- Bump pytest-order from 1.2.0 to 1.2.1 (#8941) +- Added documentation to the smoke tests infra (#8970) +- Added a checklist item for using pytest-celery in a bug report (#8971) +- Added changelog for v5.4.0 (#8973) +- Bump version: 5.4.0rc2 → 5.4.0 (#8974) + +.. _version-5.4.0rc2: + +5.4.0rc2 +======== + +:release-date: 2024-03-27 +:release-by: Tomer Nosrati + +- feat(daemon): allows daemonization options to be fetched from app settings (#8553) +- Fixed version documentation tag from #8553 in configuration.rst (#8802) +- Upgraded Sphinx from v5.3.0 to v7.x.x (#8803) +- Update elasticsearch requirement from <=8.11.1 to <=8.12.0 (#8810) +- Update elastic-transport requirement from <=8.11.0 to <=8.12.0 (#8811) +- Update cryptography to 42.0.0 (#8814) +- Catch UnicodeDecodeError when opening corrupt beat-schedule.db (#8806) +- Update cryptography to 42.0.1 (#8817) +- Limit moto to <5.0.0 until the breaking issues are fixed (#8820) +- Enable efficient `chord` when using dynamodb as backend store (#8783) +- Add a Task class specialised for Django (#8491) +- Sync kombu versions in requirements and setup.cfg (#8825) +- chore(ci): Enhance CI with `workflow_dispatch` for targeted debugging and testing (#8826) +- Update cryptography to 42.0.2 (#8827) +- Docfix: pip install celery[sqs] -> pip install "celery[sqs]" (#8829) +- Bump pre-commit/action from 3.0.0 to 3.0.1 (#8835) +- Support moto 5.0 (#8838) +- Another fix for `link_error` signatures being `dict`s instead of `Signature`s (#8841) +- Bump codecov/codecov-action from 3 to 4 (#8831) +- Upgrade from pytest-celery v1.0.0b1 -> v1.0.0b2 (#8843) +- Bump pytest from 7.4.4 to 8.0.0 (#8823) +- Update pre-commit to 3.6.1 (#8839) +- Update cryptography to 42.0.3 (#8854) +- Bump pytest from 8.0.0 to 8.0.1 (#8855) +- Update cryptography to 42.0.4 (#8864) +- Update pytest to 8.0.2 (#8870) +- Update cryptography to 42.0.5 (#8869) +- Update elasticsearch requirement from <=8.12.0 to <=8.12.1 (#8867) +- Eliminate consecutive chords generated by group | task upgrade (#8663) +- Make custom remote control commands available in CLI (#8489) +- 
Add Google Cloud Storage (GCS) backend (#8868) +- Bump msgpack from 1.0.7 to 1.0.8 (#8885) +- Update pytest to 8.1.0 (#8886) +- Bump pytest-timeout from 2.2.0 to 2.3.1 (#8894) +- Bump pytest-subtests from 0.11.0 to 0.12.1 (#8896) +- Bump mypy from 1.8.0 to 1.9.0 (#8898) +- Update pytest to 8.1.1 (#8901) +- Update contributing guide to use ssh upstream url (#8881) +- Fix recursive result parents on group in middle of chain (#8903) +- Bump pytest-celery to 1.0.0b4 (#8899) +- Adjusted smoke tests CI time limit (#8907) +- Update pytest-rerunfailures to 14.0 (#8910) +- Use the "all" extra for pytest-celery (#8911) +- Fix typos and grammar (#8915) +- Bump pytest-celery to 1.0.0rc1 (#8918) +- Print safe_say() to stdout for non-error flows (#8919) +- Update pytest-cov to 5.0.0 (#8924) +- Bump pytest-celery to 1.0.0rc2 (#8928) + +.. _version-5.4.0rc1: + +5.4.0rc1 +======== + +:release-date: 2024-01-17 7:00 P.M GMT+2 +:release-by: Tomer Nosrati + +Celery v5.4 continues our effort to provide improved stability in production +environments. The release candidate version is available for testing. +The official release is planned for March-April 2024. + +- New Config: worker_enable_prefetch_count_reduction (#8581) +- Added "Serverless" section to Redis doc (redis.rst) (#8640) +- Upstash's Celery example repo link fix (#8665) +- Update mypy version (#8679) +- Update cryptography dependency to 41.0.7 (#8690) +- Add type annotations to celery/utils/nodenames.py (#8667) +- Issue 3426. Adding myself to the contributors. (#8696) +- Bump actions/setup-python from 4 to 5 (#8701) +- Fixed bug where chord.link_error() throws an exception on a dict type errback object (#8702) +- Bump github/codeql-action from 2 to 3 (#8725) +- Fixed multiprocessing integration tests not running on Mac (#8727) +- Added make docker-docs (#8729) +- Fix DeprecationWarning: datetime.datetime.utcnow() (#8726) +- Remove `new` adjective in docs (#8743) +- add type annotation to celery/utils/sysinfo.py (#8747) +- add type annotation to celery/utils/iso8601.py (#8750) +- Change type annotation to celery/utils/iso8601.py (#8752) +- Update test deps (#8754) +- Mark flaky: test_asyncresult_get_cancels_subscription() (#8757) +- change _read_as_base64 (b64encode returns bytes) on celery/utils/term.py (#8759) +- Replace string concatenation with fstring on celery/utils/term.py (#8760) +- Add type annotation to celery/utils/term.py (#8755) +- Skipping test_tasks::test_task_accepted (#8761) +- Updated concurrency docs page. (#8753) +- Changed pyup -> dependabot for updating dependencies (#8764) +- Bump isort from 5.12.0 to 5.13.2 (#8772) +- Update elasticsearch requirement from <=8.11.0 to <=8.11.1 (#8775) +- Bump sphinx-click from 4.4.0 to 5.1.0 (#8774) +- Bump python-memcached from 1.59 to 1.61 (#8776) +- Update elastic-transport requirement from <=8.10.0 to <=8.11.0 (#8780) +- python-memcached==1.61 -> python-memcached>=1.61 (#8787) +- Remove usage of utcnow (#8791) +- Smoke Tests (#8793) +- Moved smoke tests to their own workflow (#8797) +- Bugfix: Worker not consuming tasks after Redis broker restart (#8796) +- Bugfix: Missing id on chain (#8798) diff --git a/docs/history/index.rst b/docs/history/index.rst index 496059e22b4..bb2ac38afa7 100644 --- a/docs/history/index.rst +++ b/docs/history/index.rst @@ -14,7 +14,9 @@ version please visit :ref:`changelog`. 
:maxdepth: 2 whatsnew-5.4 + changelog-5.4 whatsnew-5.3 + changelog-5.3 whatsnew-5.1 changelog-5.1 whatsnew-5.0 From edbbdf67c2b0f107b68014c506b62ed6f8b67883 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 23 Jul 2024 21:36:21 +0300 Subject: [PATCH 0763/1051] Fixed a few documentation build warnings (#9145) --- docs/history/changelog-5.3.rst | 25 ------------------------- docs/history/changelog-5.4.rst | 6 ------ docs/history/whatsnew-5.3.rst | 2 ++ 3 files changed, 2 insertions(+), 31 deletions(-) diff --git a/docs/history/changelog-5.3.rst b/docs/history/changelog-5.3.rst index 7b5802a8359..1c51eeffa4f 100644 --- a/docs/history/changelog-5.3.rst +++ b/docs/history/changelog-5.3.rst @@ -8,8 +8,6 @@ This document contains change notes for bugfix & new features in the & 5.3.x series, please see :ref:`whatsnew-5.3` for an overview of what's new in Celery 5.3. -.. _version-5.3.6: - 5.3.6 ===== @@ -28,8 +26,6 @@ The code changes are mostly fix for regressions. More details can be found below - Update elasticsearch version (#8656) - Propagates more ImportErrors during autodiscovery (#8632) -.. _version-5.3.5: - 5.3.5 ===== @@ -94,8 +90,6 @@ The code changes are mostly fix for regressions. More details can be found below - [documentation] broker_connection_max_retries of 0 does not mean "retry forever" (#8626) - added 2 debian package for better stability in Docker (#8629) -.. _version-5.3.4: - 5.3.4 ===== @@ -151,8 +145,6 @@ The code changes are mostly fix for regressions. More details can be found below - Revert "Add Semgrep to CI" (#8477) - Revert "Revert "Add Semgrep to CI"" (#8478) -.. _version-5.3.3: - 5.3.3 (Yanked) ============== @@ -167,8 +159,6 @@ The code changes are mostly fix for regressions. More details can be found below - Fixed changelog for 5.3.2 release docs. -.. _version-5.3.2: - 5.3.2 (Yanked) ============== @@ -216,8 +206,6 @@ The code changes are mostly fix for regressions. More details can be found below - Fixed AttributeError: 'str' object has no attribute (#8463) - Upgraded Kombu from 5.3.1 -> 5.3.2 (#8468) -.. _version-5.3.1: - 5.3.1 ===== @@ -235,8 +223,6 @@ The code changes are mostly fix for regressions. More details can be found below - Fixed a small float value of retry_backoff (#8295). - Limit pyro4 up to python 3.10 only as it is (#8324). -.. _version-5.3.0: - 5.3.0 ===== @@ -247,8 +233,6 @@ The code changes are mostly fix for regressions. More details can be found below - Update librabbitmq.txt > 2.0.0 (#8292). - Upgrade syntax to py3.8 (#8281). -.. _version-5.3.0rc2: - 5.3.0rc2 ======== @@ -263,9 +247,6 @@ The code changes are mostly fix for regressions. More details can be found below - Update redis.txt to 4.5 (#8278). - Update kombu>=5.3.0rc2. - -.. _version-5.3.0rc1: - 5.3.0rc1 ======== @@ -327,8 +308,6 @@ The code changes are mostly fix for regressions. More details can be found below - Renamed revoked_headers to revoked_stamps by @Nusnus in #8210 - Ensure argument for map is JSON serializable by @candleindark in #8229 -.. _version-5.3.0b2: - 5.3.0b2 ======= @@ -463,8 +442,6 @@ The code changes are mostly fix for regressions. More details can be found below - Fix command for starting flower with specified broker URL by @ShukantPal in #8071 - Improve documentation on ETA/countdown tasks (#8069) by @norbertcyran in #8075 -.. _version-5.3.0b1: - 5.3.0b1 ======= @@ -482,8 +459,6 @@ The code changes are mostly fix for regressions. More details can be found below - Only clear the cache if there are no active writers. - Billiard 4.0.1 -.. 
_version-5.3.0a1: - 5.3.0a1 ======= diff --git a/docs/history/changelog-5.4.rst b/docs/history/changelog-5.4.rst index 44cf6b74600..04ca1ce9663 100644 --- a/docs/history/changelog-5.4.rst +++ b/docs/history/changelog-5.4.rst @@ -8,8 +8,6 @@ This document contains change notes for bugfix & new features in the & 5.4.x series, please see :ref:`whatsnew-5.4` for an overview of what's new in Celery 5.4. -.. _version-5.4.0: - 5.4.0 ===== @@ -93,8 +91,6 @@ Changes since 5.4.0rc2 - Added changelog for v5.4.0 (#8973) - Bump version: 5.4.0rc2 → 5.4.0 (#8974) -.. _version-5.4.0rc2: - 5.4.0rc2 ======== @@ -150,8 +146,6 @@ Changes since 5.4.0rc2 - Update pytest-cov to 5.0.0 (#8924) - Bump pytest-celery to 1.0.0rc2 (#8928) -.. _version-5.4.0rc1: - 5.4.0rc1 ======== diff --git a/docs/history/whatsnew-5.3.rst b/docs/history/whatsnew-5.3.rst index 24ca6838ebb..4ccccb69224 100644 --- a/docs/history/whatsnew-5.3.rst +++ b/docs/history/whatsnew-5.3.rst @@ -278,6 +278,7 @@ Dispatched in the parent process, just before new child process is created in th It can be used to clean up instances that don't behave well when forking. .. code-block:: python + @signals.worker_before_create_process.connect def clean_channels(**kwargs): grpc_singleton.clean_channel() @@ -303,6 +304,7 @@ To configure the global keyprefix for the Redis result backend, use the .. code-block:: python + app.conf.result_backend_transport_options = { 'global_keyprefix': 'my_prefix_' } From 9fa649b3ba50d38dcee00023b5c63c591c1518c0 Mon Sep 17 00:00:00 2001 From: klein Date: Wed, 24 Jul 2024 18:39:11 +0800 Subject: [PATCH 0764/1051] =?UTF-8?q?=F0=9F=93=83=20docs(README):=20link?= =?UTF-8?q?=20invalid=20(#9148)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit this chinese document is no longer being maintained. so should be removed. --- README.rst | 2 -- 1 file changed, 2 deletions(-) diff --git a/README.rst b/README.rst index c2737fb6ea2..7a03be1db87 100644 --- a/README.rst +++ b/README.rst @@ -229,8 +229,6 @@ Documentation The `latest documentation`_ is hosted at Read The Docs, containing user guides, tutorials, and an API reference. -最新的中文文档托管在 https://www.celerycn.io/ 中,包含用户指南、教程、API接口等。 - .. _`latest documentation`: https://docs.celeryq.dev/en/latest/ .. _celery-installation: From c939be50626d6d878f0ff2d58447d1e1cfc1df47 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 24 Jul 2024 16:46:45 +0300 Subject: [PATCH 0765/1051] Prepare for (pre) release: v5.5.0b1 (#9146) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Added Changelog for v5.5.0b1 * Added history placeholder docs for v5.5.0 * Bump version: 5.4.0 → 5.5.0b1 --- .bumpversion.cfg | 2 +- Changelog.rst | 118 +++++++++++++++++++++++- README.rst | 6 +- celery/__init__.py | 4 +- docs/django/first-steps-with-django.rst | 2 +- docs/history/changelog-5.5.rst | 7 ++ docs/history/index.rst | 2 + docs/history/whatsnew-5.5.rst | 15 +++ docs/includes/introduction.txt | 2 +- 9 files changed, 145 insertions(+), 13 deletions(-) create mode 100644 docs/history/changelog-5.5.rst create mode 100644 docs/history/whatsnew-5.5.rst diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 46fe5a41ff2..f6606dff29f 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 5.4.0 +current_version = 5.5.0b1 commit = True tag = True parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P[a-z\d]+)? 
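The ``parse`` pattern above is what lets the bump tool decompose a pre-release version string such as ``5.5.0b1`` into its components. A small illustration of how such a pattern behaves; the group names (``major``, ``minor``, ``patch``, ``releaselevel``) follow the usual bumpversion convention and are an assumption here:

.. code-block:: python

    import re

    # Bumpversion-style pattern; group names are assumed per common convention.
    VERSION_RE = re.compile(
        r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?P<releaselevel>[a-z\d]+)?"
    )

    def parse_version(version: str) -> dict:
        """Split a version string like '5.5.0b1' into its parts."""
        match = VERSION_RE.fullmatch(version)
        if match is None:
            raise ValueError(f"unrecognized version string: {version!r}")
        return match.groupdict()

    assert parse_version("5.5.0b1") == {
        "major": "5", "minor": "5", "patch": "0", "releaselevel": "b1",
    }
    assert parse_version("5.4.0")["releaselevel"] is None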
diff --git a/Changelog.rst b/Changelog.rst index 985be8f1a17..ad3a58d3519 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -5,8 +5,116 @@ ================ This document contains change notes for bugfix & new features -in the main branch & 5.4.x series, please see :ref:`whatsnew-5.4` for -an overview of what's new in Celery 5.4. +in the main branch & 5.5.x series, please see :ref:`whatsnew-5.5` for +an overview of what's new in Celery 5.5. + +.. _version-5.5.0b1: + +5.5.0b1 +======= + +:release-date: 2024-07-24 +:release-by: Tomer Nosrati + +Celery v5.5.0 Beta 1 is now available for testing. +Please help us test this version and report any issues. + +Key Highlights +~~~~~~~~~~~~~~ + +Redis Broker Stability Improvements +----------------------------------- +The root cause of the Redis broker instability issue has been `identified and resolved `_ +in the release-candidate for Kombu v5.4.0. This beta release has been upgraded to use the new +Kombu RC version, which should resolve the disconnections bug and offer additional improvements. + +After upgrading to this version, please share your feedback on the Redis broker stability. + +Relevant Issues: +`#7276 `_, +`#8091 `_, +`#8030 `_, +`#8384 `_ + +Quorum Queues Initial Support +----------------------------- +This release introduces the initial support for Quorum Queues with Celery. + +See new configuration options for more details: + +- :setting:`task_default_queue_type` +- :setting:`worker_detect_quorum_queues` + +After upgrading to this version, please share your feedback on the Quorum Queues support. + +Relevant Issues: +`#6067 `_, +`#9121 `_ + +What's Changed +~~~~~~~~~~~~~~ + +- (docs): use correct version celery v.5.4.x (#8975) +- Update mypy to 1.10.0 (#8977) +- Limit pymongo<4.7 when Python <= 3.10 due to breaking changes in 4.7 (#8988) +- Bump pytest from 8.1.1 to 8.2.0 (#8987) +- Update README to Include FastAPI in Framework Integration Section (#8978) +- Clarify return values of ..._on_commit methods (#8984) +- add kafka broker docs (#8935) +- Limit pymongo<4.7 regardless of Python version (#8999) +- Update pymongo[srv] requirement from <4.7,>=4.0.2 to >=4.0.2,<4.8 (#9000) +- Update elasticsearch requirement from <=8.13.0 to <=8.13.1 (#9004) +- security: SecureSerializer: support generic low-level serializers (#8982) +- don't kill if pid same as file (#8997) (#8998) +- Update cryptography to 42.0.6 (#9005) +- Bump cryptography from 42.0.6 to 42.0.7 (#9009) +- Added -vv to unit, integration and smoke tests (#9014) +- SecuritySerializer: ensure pack separator will not be conflicted with serialized fields (#9010) +- Update sphinx-click to 5.2.2 (#9025) +- Bump sphinx-click from 5.2.2 to 6.0.0 (#9029) +- Fix a typo to display the help message in first-steps-with-django (#9036) +- Pinned requests to v2.31.0 due to docker-py bug #3256 (#9039) +- Fix certificate validity check (#9037) +- Revert "Pinned requests to v2.31.0 due to docker-py bug #3256" (#9043) +- Bump pytest from 8.2.0 to 8.2.1 (#9035) +- Update elasticsearch requirement from <=8.13.1 to <=8.13.2 (#9045) +- Fix detection of custom task set as class attribute with Django (#9038) +- Update elastic-transport requirement from <=8.13.0 to <=8.13.1 (#9050) +- Bump pycouchdb from 1.14.2 to 1.16.0 (#9052) +- Update pytest to 8.2.2 (#9060) +- Bump cryptography from 42.0.7 to 42.0.8 (#9061) +- Update elasticsearch requirement from <=8.13.2 to <=8.14.0 (#9069) +- [enhance feature] Crontab schedule: allow using month names (#9068) +- Enhance tox environment: [testenv:clean] (#9072) +- 
Clarify docs about Reserve one task at a time (#9073) +- GCS docs fixes (#9075) +- Use hub.remove_writer instead of hub.remove for write fds (#4185) (#9055) +- Class method to process crontab string (#9079) +- Fixed smoke tests env bug when using integration tasks that rely on Redis (#9090) +- Bugfix - a task will run multiple times when chaining chains with groups (#9021) +- Bump mypy from 1.10.0 to 1.10.1 (#9096) +- Don't add a separator to global_keyprefix if it already has one (#9080) +- Update pymongo[srv] requirement from <4.8,>=4.0.2 to >=4.0.2,<4.9 (#9111) +- Added missing import in examples for Django (#9099) +- Bump Kombu to v5.4.0rc1 (#9117) +- Removed skipping Redis in t/smoke/tests/test_consumer.py tests (#9118) +- Update pytest-subtests to 0.13.0 (#9120) +- Increased smoke tests CI timeout (#9122) +- Bump Kombu to v5.4.0rc2 (#9127) +- Update zstandard to 0.23.0 (#9129) +- Update pytest-subtests to 0.13.1 (#9130) +- Changed retry to tenacity in smoke tests (#9133) +- Bump mypy from 1.10.1 to 1.11.0 (#9135) +- Update cryptography to 43.0.0 (#9138) +- Update pytest to 8.3.1 (#9137) +- Added support for Quorum Queues (#9121) +- Bump Kombu to v5.4.0rc3 (#9139) +- Cleanup in Changelog.rst (#9141) +- Update Django docs for CELERY_CACHE_BACKEND (#9143) +- Added missing docs to previous releases (#9144) +- Fixed a few documentation build warnings (#9145) +- docs(README): link invalid (#9148) +- Prepare for (pre) release: v5.5.0b1 (#9146) .. _version-5.4.0: @@ -31,7 +139,7 @@ With our enhanced QA capabilities, we are now prepared to address the core issue The rest of the changes for this release are grouped below, with the changes from the latest release candidate listed at the end. Changes -------- +~~~~~~~ - Add a Task class specialised for Django (#8491) - Add Google Cloud Storage (GCS) backend (#8868) - Added documentation to the smoke tests infra (#8970) @@ -57,7 +165,7 @@ Changes - Updated concurrency docs page. (#8753) Dependencies Updates --------------------- +~~~~~~~~~~~~~~~~~~~~ - Bump actions/setup-python from 4 to 5 (#8701) - Bump codecov/codecov-action from 3 to 4 (#8831) - Bump isort from 5.12.0 to 5.13.2 (#8772) @@ -79,7 +187,7 @@ Dependencies Updates - Upgraded Sphinx from v5.3.0 to v7.x.x (#8803) Changes since 5.4.0rc2 ----------------------- +~~~~~~~~~~~~~~~~~~~~~~~ - Update elastic-transport requirement from <=8.12.0 to <=8.13.0 (#8933) - Update elasticsearch requirement from <=8.12.1 to <=8.13.0 (#8934) - Hotfix: Smoke tests didn't allow customizing the worker's command arguments, now it does (#8937) diff --git a/README.rst b/README.rst index 7a03be1db87..dd033be8c9a 100644 --- a/README.rst +++ b/README.rst @@ -2,7 +2,7 @@ |build-status| |coverage| |license| |wheel| |semgrep| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge| -:Version: 5.4.0 (opalescent) +:Version: 5.5.0b1 (immunity) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ @@ -58,7 +58,7 @@ in such a way that the client enqueues an URL to be requested by a worker. What do I need? 
=============== -Celery version 5.3.5 runs on: +Celery version 5.5.x runs on: - Python (3.8, 3.9, 3.10, 3.11, 3.12) - PyPy3.9+ (v7.3.12+) @@ -92,7 +92,7 @@ Get Started =========== If this is the first time you're trying to use Celery, or you're -new to Celery v5.4.x coming from previous versions then you should read our +new to Celery v5.5.x coming from previous versions then you should read our getting started tutorials: - `First steps with Celery`_ diff --git a/celery/__init__.py b/celery/__init__.py index 5b93aa4bf5b..9dec1c0cca8 100644 --- a/celery/__init__.py +++ b/celery/__init__.py @@ -15,9 +15,9 @@ # Lazy loading from . import local -SERIES = 'opalescent' +SERIES = 'immunity' -__version__ = '5.4.0' +__version__ = '5.5.0b1' __author__ = 'Ask Solem' __contact__ = 'auvipy@gmail.com' __homepage__ = 'https://docs.celeryq.dev/' diff --git a/docs/django/first-steps-with-django.rst b/docs/django/first-steps-with-django.rst index 28654a633a0..8ac28d342e3 100644 --- a/docs/django/first-steps-with-django.rst +++ b/docs/django/first-steps-with-django.rst @@ -19,7 +19,7 @@ Using Celery with Django .. note:: - Celery 5.4.x supports Django 2.2 LTS or newer versions. + Celery 5.5.x supports Django 2.2 LTS or newer versions. Please use Celery 5.2.x for versions older than Django 2.2 or Celery 4.4.x if your Django version is older than 1.11. To use Celery with your Django project you must first define diff --git a/docs/history/changelog-5.5.rst b/docs/history/changelog-5.5.rst new file mode 100644 index 00000000000..dd58c2492ed --- /dev/null +++ b/docs/history/changelog-5.5.rst @@ -0,0 +1,7 @@ +.. _changelog-5.5: + +================ + Change history +================ + +TBD diff --git a/docs/history/index.rst b/docs/history/index.rst index bb2ac38afa7..22cd146a1f5 100644 --- a/docs/history/index.rst +++ b/docs/history/index.rst @@ -13,6 +13,8 @@ version please visit :ref:`changelog`. .. toctree:: :maxdepth: 2 + whatsnew-5.5 + changelog-5.5 whatsnew-5.4 changelog-5.4 whatsnew-5.3 diff --git a/docs/history/whatsnew-5.5.rst b/docs/history/whatsnew-5.5.rst new file mode 100644 index 00000000000..09e6aabb0ae --- /dev/null +++ b/docs/history/whatsnew-5.5.rst @@ -0,0 +1,15 @@ +.. _whatsnew-5.5: + +========================================= + What's new in Celery 5.5 (Immunity) +========================================= +:Author: Tomer Nosrati (``tomer.nosrati at gmail.com``). + +.. sidebar:: Change history + + What's new documents describe the changes in major versions, + we also have a :ref:`changelog` that lists the changes in bugfix + releases (0.0.x), while older series are archived under the :ref:`history` + section. + +TBD diff --git a/docs/includes/introduction.txt b/docs/includes/introduction.txt index 267137202ae..19cbdd61cb7 100644 --- a/docs/includes/introduction.txt +++ b/docs/includes/introduction.txt @@ -1,4 +1,4 @@ -:Version: 5.4.0 (opalescent) +:Version: 5.5.0b1 (immunity) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ From 5e26553219da10f65f14d59573de18b5e366a693 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 25 Jul 2024 22:16:34 +0000 Subject: [PATCH 0766/1051] Bump pytest from 8.3.1 to 8.3.2 Bumps [pytest](https://github.com/pytest-dev/pytest) from 8.3.1 to 8.3.2. 
- [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/8.3.1...8.3.2) --- updated-dependencies: - dependency-name: pytest dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index 493fc6df658..524d7c6d83d 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,4 +1,4 @@ -pytest==8.3.1 +pytest==8.3.2 pytest-celery[all]>=1.0.0 pytest-rerunfailures==14.0 pytest-subtests==0.13.1 From 1d5c1ccc09e49c5b9c6ecdd7e4b819d01ec50ace Mon Sep 17 00:00:00 2001 From: Devid <13779643+sevdog@users.noreply.github.com> Date: Mon, 29 Jul 2024 12:50:28 +0100 Subject: [PATCH 0767/1051] Remove setuptools deprecated test command from imports (#9159) --- setup.py | 1 - 1 file changed, 1 deletion(-) diff --git a/setup.py b/setup.py index aef46a1a15f..324f6c0e607 100755 --- a/setup.py +++ b/setup.py @@ -4,7 +4,6 @@ import re import setuptools -import setuptools.command.test NAME = 'celery' From 327e13600dded9e83fb4e31cbd9f9853f6f32da5 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 29 Jul 2024 20:01:57 +0300 Subject: [PATCH 0768/1051] [pre-commit.ci] pre-commit autoupdate (#9160) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/asottile/pyupgrade: v3.16.0 → v3.17.0](https://github.com/asottile/pyupgrade/compare/v3.16.0...v3.17.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index add6cd19744..4544b4d883d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/asottile/pyupgrade - rev: v3.16.0 + rev: v3.17.0 hooks: - id: pyupgrade args: ["--py38-plus"] From 64a8cb6585b246c7f19d3fb67ee7fde118681f5a Mon Sep 17 00:00:00 2001 From: "pyup.io bot" Date: Tue, 30 Jul 2024 08:19:35 -0700 Subject: [PATCH 0769/1051] Pin pre-commit to latest version 3.8.0 from Python 3.9 (#9156) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Pin pre-commit to latest version 3.8.0 * Use pre-commit v3.8.0 from Python 3.9 up * Update requirements/test.txt * Changed from "> ‘3.9'" to ">= '3.9'" --------- Co-authored-by: Tomer Nosrati Co-authored-by: Asif Saif Uddin --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index 524d7c6d83d..bf882b70e0d 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -10,7 +10,7 @@ moto>=4.1.11,<5.1.0 # typing extensions mypy==1.11.0; platform_python_implementation=="CPython" pre-commit>=3.5.0,<3.6.0; python_version < '3.9' -pre-commit>=3.6.1; python_version >= '3.9' +pre-commit>=3.8.0; python_version >= '3.9' -r extras/yaml.txt -r extras/msgpack.txt -r extras/mongodb.txt From c56ca3f867d7cd03e3e4ccefb17a243714e5daa2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 31 Jul 2024 14:34:49 +0300 Subject: [PATCH 0770/1051] Bump mypy from 1.11.0 to 1.11.1 (#9164) Bumps 
[mypy](https://github.com/python/mypy) from 1.11.0 to 1.11.1. - [Changelog](https://github.com/python/mypy/blob/master/CHANGELOG.md) - [Commits](https://github.com/python/mypy/compare/v1.11...v1.11.1) --- updated-dependencies: - dependency-name: mypy dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index bf882b70e0d..58265c8cad9 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -8,7 +8,7 @@ pytest-order==1.2.1 boto3>=1.26.143 moto>=4.1.11,<5.1.0 # typing extensions -mypy==1.11.0; platform_python_implementation=="CPython" +mypy==1.11.1; platform_python_implementation=="CPython" pre-commit>=3.5.0,<3.6.0; python_version < '3.9' pre-commit>=3.8.0; python_version >= '3.9' -r extras/yaml.txt From f12abdfa4dc5976d48869dd6772c44c64f07e150 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Fri, 2 Aug 2024 20:32:27 +0300 Subject: [PATCH 0771/1051] Change "docker-compose" to "docker compose" in Makefile (#9169) --- Makefile | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/Makefile b/Makefile index 4ca210d1d98..f333376ad1c 100644 --- a/Makefile +++ b/Makefile @@ -177,36 +177,36 @@ authorcheck: .PHONY: docker-build docker-build: - @docker-compose -f docker/docker-compose.yml build + @docker compose -f docker/docker-compose.yml build .PHONY: docker-lint docker-lint: - @docker-compose -f docker/docker-compose.yml run --rm -w /home/developer/celery celery tox -e lint + @docker compose -f docker/docker-compose.yml run --rm -w /home/developer/celery celery tox -e lint .PHONY: docker-unit-tests docker-unit-tests: - @docker-compose -f docker/docker-compose.yml run --rm -w /home/developer/celery celery tox -e 3.12-unit -- $(filter-out $@,$(MAKECMDGOALS)) + @docker compose -f docker/docker-compose.yml run --rm -w /home/developer/celery celery tox -e 3.12-unit -- $(filter-out $@,$(MAKECMDGOALS)) # Integration tests are not fully supported when running in a docker container yet so we allow them to # gracefully fail until fully supported. # TODO: Add documentation (in help command) when fully supported. .PHONY: docker-integration-tests docker-integration-tests: - @docker-compose -f docker/docker-compose.yml run --rm -w /home/developer/celery celery tox -e 3.12-integration-docker -- --maxfail=1000 + @docker compose -f docker/docker-compose.yml run --rm -w /home/developer/celery celery tox -e 3.12-integration-docker -- --maxfail=1000 .PHONY: docker-bash docker-bash: - @docker-compose -f docker/docker-compose.yml run --rm -w /home/developer/celery celery bash + @docker compose -f docker/docker-compose.yml run --rm -w /home/developer/celery celery bash .PHONY: docker-docs docker-docs: - @docker-compose -f docker/docker-compose.yml up --build -d docs + @docker compose -f docker/docker-compose.yml up --build -d docs @echo "Waiting 60 seconds for docs service to build the documentation inside the container..." - @timeout 60 sh -c 'until docker logs $$(docker-compose -f docker/docker-compose.yml ps -q docs) 2>&1 | \ + @timeout 60 sh -c 'until docker logs $$(docker compose -f docker/docker-compose.yml ps -q docs) 2>&1 | \ grep "build succeeded"; do sleep 1; done' || \ (echo "Error! 
- run manually: docker compose -f ./docker/docker-compose.yml up --build docs"; \ - docker-compose -f docker/docker-compose.yml logs --tail=50 docs; false) - @docker-compose -f docker/docker-compose.yml down + docker compose -f docker/docker-compose.yml logs --tail=50 docs; false) + @docker compose -f docker/docker-compose.yml down .PHONY: catch-all %: catch-all From eb34003a9099796a8d19fb30261bf89cd3d0a722 Mon Sep 17 00:00:00 2001 From: Mathias Ertl Date: Sat, 3 Aug 2024 16:47:41 +0200 Subject: [PATCH 0772/1051] update python versions and docker compose (#9171) * update python versions in CONTRIBUTING.rst * remove version flag, which is obsolete in newer docker compose versions --- CONTRIBUTING.rst | 26 +++++++++++++------------- docker/docker-compose.yml | 4 +--- 2 files changed, 14 insertions(+), 16 deletions(-) diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index f3ffbbdd3af..2a2d239320d 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -465,13 +465,13 @@ Docker image can be built via: .. code-block:: console - $ docker-compose build celery + $ docker compose build celery and run via: .. code-block:: console - $ docker-compose run --rm celery <command> + $ docker compose run --rm celery <command> where <command> is a command to execute in a Docker container. The `--rm` flag indicates that the container should be removed after it is exited and is useful @@ -486,7 +486,7 @@ Some useful commands to run: * ``make test`` To run the test suite. - **Note:** This will run tests using python 3.8 by default. + **Note:** This will run tests using python 3.12 by default. * ``tox`` **Note:** This command will run tests for every environment defined in :file:`tox.ini`. It takes a while. -* ``pyenv exec python{3.6,3.7,3.8,3.9} -m pytest t/unit`` +* ``pyenv exec python{3.8,3.9,3.10,3.11,3.12} -m pytest t/unit`` To run unit tests using pytest. - **Note:** ``{3.6,3.7,3.8,3.9}`` means you can use any of those options. - e.g. ``pyenv exec python3.7 -m pytest t/unit`` + **Note:** ``{3.8,3.9,3.10,3.11,3.12}`` means you can use any of those options. + e.g. ``pyenv exec python3.12 -m pytest t/unit`` -* ``pyenv exec python{3.6,3.7,3.8,3.9} -m pytest t/integration`` +* ``pyenv exec python{3.8,3.9,3.10,3.11,3.12} -m pytest t/integration`` To run integration tests using pytest - **Note:** ``{3.6,3.7,3.8,3.9}`` means you can use any of those options. - e.g. ``pyenv exec python3.7 -m pytest t/unit`` + **Note:** ``{3.8,3.9,3.10,3.11,3.12}`` means you can use any of those options. + e.g. ``pyenv exec python3.12 -m pytest t/integration`` By default, docker-compose will mount the Celery and test folders in the Docker container, allowing code changes and testing to be immediately visible inside the Docker container. Environment variables, such as the broker and backend to use are also defined in the :file:`docker/docker-compose.yml` file. -By running ``docker-compose build celery`` an image will be created with the +By running ``docker compose build celery`` an image will be created with the name ``celery/celery:dev``. This docker image has every dependency needed for development installed. ``pyenv`` is used to install multiple python -versions, the docker image offers python 3.6, 3.7, 3.8 and 3.9. -The default python version is set to 3.8. +versions, the docker image offers python 3.8, 3.9, 3.10, 3.11 and 3.12. +The default python version is set to 3.12. The :file:`docker-compose.yml` file defines the necessary environment variables to run integration tests. The ``celery`` service also mounts the codebase
The ``celery`` service also mounts the codebase @@ -527,7 +527,7 @@ as global module for development. If you prefer, you can also run ``python -m pip install -e .`` to install the codebase in development mode. If you would like to run a Django or stand alone project to manually test or -debug a feature, you can use the image built by `docker-compose` and mount +debug a feature, you can use the image built by `docker compose` and mount your custom code. Here's an example: Assuming a folder structure such as: diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index 221e6ddb3ef..c31138f1942 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -1,5 +1,3 @@ -version: '3' - services: celery: build: @@ -47,4 +45,4 @@ services: - ../docs:/docs:z ports: - "7001:7000" - command: /start-docs \ No newline at end of file + command: /start-docs From 97b2d1a9eb5604ae5490882f683ec87cb6298be8 Mon Sep 17 00:00:00 2001 From: Mathias Ertl Date: Sat, 3 Aug 2024 17:50:06 +0200 Subject: [PATCH 0773/1051] Add support for Pydantic model validation/serialization (fixes #8751) (#9023) * add pydantic wrapper (fixes #8751) * Fixed stamping smoke tests docker build failure * add example for pydantic (de)serialization --------- Co-authored-by: Tomer Nosrati --- celery/app/base.py | 76 ++++++++++++- docs/userguide/tasks.rst | 57 ++++++++++ examples/pydantic/__init__.py | 0 examples/pydantic/tasks.py | 21 ++++ requirements/extras/pydantic.txt | 1 + requirements/test.txt | 1 + setup.py | 1 + t/integration/tasks.py | 18 +++ t/integration/test_tasks.py | 20 +++- t/smoke/workers/docker/dev | 2 +- t/smoke/workers/docker/pypi | 3 +- t/unit/app/test_app.py | 188 ++++++++++++++++++++++++++++++- 12 files changed, 381 insertions(+), 7 deletions(-) create mode 100644 examples/pydantic/__init__.py create mode 100644 examples/pydantic/tasks.py create mode 100644 requirements/extras/pydantic.txt diff --git a/celery/app/base.py b/celery/app/base.py index 63f3d54abec..c1bb9b790b5 100644 --- a/celery/app/base.py +++ b/celery/app/base.py @@ -1,8 +1,11 @@ """Actual App instance implementation.""" +import functools +import importlib import inspect import os import sys import threading +import typing import warnings from collections import UserDict, defaultdict, deque from datetime import datetime @@ -43,6 +46,10 @@ from .utils import (AppPickler, Settings, _new_key_to_old, _old_key_to_new, _unpickle_app, _unpickle_app_v2, appstr, bugreport, detect_settings) +if typing.TYPE_CHECKING: # pragma: no cover # codecov does not capture this + # flake8 marks the BaseModel import as unused, because the actual typehint is quoted. 
+ from pydantic import BaseModel # noqa: F401 + __all__ = ('Celery',) logger = get_logger(__name__) @@ -92,6 +99,59 @@ def _after_fork_cleanup_app(app): logger.info('after forker raised exception: %r', exc, exc_info=1) +def pydantic_wrapper( + app: "Celery", + task_fun: typing.Callable[..., typing.Any], + task_name: str, + strict: bool = True, + context: typing.Optional[typing.Dict[str, typing.Any]] = None, + dump_kwargs: typing.Optional[typing.Dict[str, typing.Any]] = None +): + """Wrapper to validate arguments and serialize return values using Pydantic.""" + try: + pydantic = importlib.import_module('pydantic') + except ModuleNotFoundError as ex: + raise ImproperlyConfigured('You need to install pydantic to use pydantic model serialization.') from ex + + BaseModel: typing.Type['BaseModel'] = pydantic.BaseModel # noqa: F811 # only defined when type checking + + if context is None: + context = {} + if dump_kwargs is None: + dump_kwargs = {} + dump_kwargs.setdefault('mode', 'json') + + task_signature = inspect.signature(task_fun) + + @functools.wraps(task_fun) + def wrapper(*task_args, **task_kwargs): + # Validate task parameters if type hinted as BaseModel + bound_args = task_signature.bind(*task_args, **task_kwargs) + for arg_name, arg_value in bound_args.arguments.items(): + arg_annotation = task_signature.parameters[arg_name].annotation + if issubclass(arg_annotation, BaseModel): + bound_args.arguments[arg_name] = arg_annotation.model_validate( + arg_value, + strict=strict, + context={**context, 'celery_app': app, 'celery_task_name': task_name}, + ) + + # Call the task with (potentially) converted arguments + returned_value = task_fun(*bound_args.args, **bound_args.kwargs) + + # Dump Pydantic model if the returned value is an instance of pydantic.BaseModel *and* its + # class matches the typehint + if ( + isinstance(returned_value, BaseModel) + and isinstance(returned_value, task_signature.return_annotation) + ): + return returned_value.model_dump(**dump_kwargs) + + return returned_value + + return wrapper + + class PendingConfiguration(UserDict, AttributeDictMixin): # `app.conf` will be of this type before being explicitly configured, # meaning the app can keep any configuration set directly @@ -469,13 +529,27 @@ def cons(app): def type_checker(self, fun, bound=False): return staticmethod(head_from_fun(fun, bound=bound)) - def _task_from_fun(self, fun, name=None, base=None, bind=False, **options): + def _task_from_fun( + self, + fun, + name=None, + base=None, + bind=False, + pydantic: bool = False, + pydantic_strict: bool = True, + pydantic_context: typing.Optional[typing.Dict[str, typing.Any]] = None, + pydantic_dump_kwargs: typing.Optional[typing.Dict[str, typing.Any]] = None, + **options, + ): if not self.finalized and not self.autofinalize: raise RuntimeError('Contract breach: app not finalized') name = name or self.gen_task_name(fun.__name__, fun.__module__) base = base or self.Task if name not in self._tasks: + if pydantic is True: + fun = pydantic_wrapper(self, fun, name, pydantic_strict, pydantic_context, pydantic_dump_kwargs) + run = fun if bind else staticmethod(fun) task = type(fun.__name__, (base,), dict({ 'app': self, diff --git a/docs/userguide/tasks.rst b/docs/userguide/tasks.rst index 1fc99c39962..88d1b8022ed 100644 --- a/docs/userguide/tasks.rst +++ b/docs/userguide/tasks.rst @@ -795,6 +795,62 @@ You can also set `autoretry_for`, `max_retries`, `retry_backoff`, `retry_backoff This allows to exclude some exceptions that match `autoretry_for `:attr: but for which you 
don't want a retry. +.. _task-pydantic: + +Argument validation with Pydantic +================================= + +.. versionadded:: 5.5.0 + +You can use Pydantic_ to validate and convert arguments as well as serializing +results based on typehints by passing ``pydantic=True``. For example: + +.. code-block:: python + + from pydantic import BaseModel + + class ArgModel(BaseModel): + value: int + + class ReturnModel(BaseModel): + value: str + + @app.task(pydantic=True) + def x(arg: ArgModel) -> ReturnModel: + # args/kwargs type hinted as Pydantic model will be converted + assert isinstance(arg, ArgModel) + + # The returned model will be converted to a dict automatically + return ReturnModel(value=f"example: {arg.value}") + +The task can then be called using a dict matching the model, and you'll receive +the returned model "dumped" (serialized using ``BaseModel.model_dump()``): + +.. code-block:: python + + >>> result = x.delay({'value': 1}) + >>> result.get(timeout=1) + {'value': 'example: 1'} + +There are a few more options influencing Pydantic behavior: + +.. attribute:: Task.pydantic_strict + + By default, `strict mode `_ + is enabled. You can pass ``False`` to disable strict model validation. + +.. attribute:: Task.pydantic_context + + Pass `additional validation context + `_ during + Pydantic model validation. The context already includes the application object as + ``celery_app`` and the task name as ``celery_task_name`` by default. + +.. attribute:: Task.pydantic_dump_kwargs + + When serializing a result, pass these additional arguments to ``dump_kwargs()``. + By default, only ``mode='json'`` is passed. + .. _task-options: @@ -2091,3 +2147,4 @@ To make API calls to `Akismet`_ I use the `akismet.py`_ library written by .. _`Michael Foord`: http://www.voidspace.org.uk/ .. _`exponential backoff`: https://en.wikipedia.org/wiki/Exponential_backoff .. _`jitter`: https://en.wikipedia.org/wiki/Jitter +.. 
_`Pydantic`: https://docs.pydantic.dev/ diff --git a/examples/pydantic/__init__.py b/examples/pydantic/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/examples/pydantic/tasks.py b/examples/pydantic/tasks.py new file mode 100644 index 00000000000..70b821338c1 --- /dev/null +++ b/examples/pydantic/tasks.py @@ -0,0 +1,21 @@ +from pydantic import BaseModel + +from celery import Celery + +app = Celery('tasks', broker='amqp://') + + +class ArgModel(BaseModel): + value: int + + +class ReturnModel(BaseModel): + value: str + + +@app.task(pydantic=True) +def x(arg: ArgModel) -> ReturnModel: + # args/kwargs type hinted as Pydantic model will be converted + assert isinstance(arg, ArgModel) + # The returned model will be converted to a dict automatically + return ReturnModel(value=f"example: {arg.value}") diff --git a/requirements/extras/pydantic.txt b/requirements/extras/pydantic.txt new file mode 100644 index 00000000000..29ac1fa96c9 --- /dev/null +++ b/requirements/extras/pydantic.txt @@ -0,0 +1 @@ +pydantic>=2.4 diff --git a/requirements/test.txt b/requirements/test.txt index 58265c8cad9..bf569095bdb 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -15,3 +15,4 @@ pre-commit>=3.8.0; python_version >= '3.9' -r extras/msgpack.txt -r extras/mongodb.txt -r extras/gcs.txt +-r extras/pydantic.txt diff --git a/setup.py b/setup.py index 324f6c0e607..8cfc1749389 100755 --- a/setup.py +++ b/setup.py @@ -30,6 +30,7 @@ 'mongodb', 'msgpack', 'pymemcache', + 'pydantic', 'pyro', 'pytest', 'redis', diff --git a/t/integration/tasks.py b/t/integration/tasks.py index f09492f3fd5..752db0278c3 100644 --- a/t/integration/tasks.py +++ b/t/integration/tasks.py @@ -2,6 +2,8 @@ from collections.abc import Iterable from time import sleep +from pydantic import BaseModel + from celery import Signature, Task, chain, chord, group, shared_task from celery.canvas import signature from celery.exceptions import SoftTimeLimitExceeded @@ -475,6 +477,22 @@ def replaced_with_me(): return True +class AddParameterModel(BaseModel): + x: int + y: int + + +class AddResultModel(BaseModel): + result: int + + +@shared_task(pydantic=True) +def add_pydantic(data: AddParameterModel) -> AddResultModel: + """Add two numbers, but with parameters and results using Pydantic model serialization.""" + value = data.x + data.y + return AddResultModel(result=value) + + if LEGACY_TASKS_DISABLED: class StampOnReplace(StampingVisitor): stamp = {"StampOnReplace": "This is the replaced task"} diff --git a/t/integration/test_tasks.py b/t/integration/test_tasks.py index 87587119b15..060176e8b15 100644 --- a/t/integration/test_tasks.py +++ b/t/integration/test_tasks.py @@ -16,9 +16,9 @@ from celery.worker import state as worker_state from .conftest import TEST_BACKEND, get_active_redis_channels, get_redis_connection -from .tasks import (ClassBasedAutoRetryTask, ExpectedException, add, add_ignore_result, add_not_typed, fail, - fail_unpickleable, print_unicode, retry, retry_once, retry_once_headers, retry_once_priority, - retry_unpickleable, return_properties, second_order_replace1, sleeping) +from .tasks import (ClassBasedAutoRetryTask, ExpectedException, add, add_ignore_result, add_not_typed, add_pydantic, + fail, fail_unpickleable, print_unicode, retry, retry_once, retry_once_headers, + retry_once_priority, retry_unpickleable, return_properties, second_order_replace1, sleeping) TIMEOUT = 10 @@ -128,6 +128,20 @@ def test_ignore_result(self, manager): sleep(1) assert result.result is None + @flaky + def 
test_pydantic_annotations(self, manager): + """Tests task call with Pydantic model serialization.""" + results = [] + # Tests calling task only with args + for i in range(10): + results.append([i + i, add_pydantic.delay({'x': i, 'y': i})]) + for expected, result in results: + value = result.get(timeout=10) + assert value == {'result': expected} + assert result.status == 'SUCCESS' + assert result.ready() is True + assert result.successful() is True + @flaky def test_timeout(self, manager): """Testing timeout of getting results from tasks.""" diff --git a/t/smoke/workers/docker/dev b/t/smoke/workers/docker/dev index 82427c19573..b932dd4b393 100644 --- a/t/smoke/workers/docker/dev +++ b/t/smoke/workers/docker/dev @@ -38,7 +38,7 @@ WORKDIR /celery COPY --chown=test_user:test_user . /celery RUN pip install --no-cache-dir --upgrade \ pip \ - -e /celery[redis,pymemcache] \ + -e /celery[redis,pymemcache,pydantic] \ pytest-celery>=1.0.0 # The workdir must be /app diff --git a/t/smoke/workers/docker/pypi b/t/smoke/workers/docker/pypi index 699f290e119..87344cee2ad 100644 --- a/t/smoke/workers/docker/pypi +++ b/t/smoke/workers/docker/pypi @@ -38,7 +38,8 @@ EXPOSE 5678 RUN pip install --no-cache-dir --upgrade \ pip \ celery[redis,pymemcache]${CELERY_VERSION:+==$CELERY_VERSION} \ - pytest-celery>=1.0.0 + pytest-celery>=1.0.0 \ + pydantic>=2.4 # The workdir must be /app WORKDIR /app diff --git a/t/unit/app/test_app.py b/t/unit/app/test_app.py index 4c92f475d42..1ca508d89b3 100644 --- a/t/unit/app/test_app.py +++ b/t/unit/app/test_app.py @@ -1,16 +1,19 @@ import gc +import importlib import itertools import os import ssl import sys +import typing import uuid from copy import deepcopy from datetime import datetime, timedelta from datetime import timezone as datetime_timezone from pickle import dumps, loads -from unittest.mock import Mock, patch +from unittest.mock import DEFAULT, Mock, patch import pytest +from pydantic import BaseModel, ValidationInfo, model_validator from vine import promise from celery import Celery, _state @@ -505,6 +508,189 @@ def foo(): pass check.assert_called_with(foo) + def test_task_with_pydantic_with_no_args(self): + """Test a pydantic task with no arguments or return value.""" + with self.Celery() as app: + check = Mock() + + @app.task(pydantic=True) + def foo(): + check() + + assert foo() is None + check.assert_called_once() + + def test_task_with_pydantic_with_arg_and_kwarg(self): + """Test a pydantic task with simple (non-pydantic) arg/kwarg and return value.""" + with self.Celery() as app: + check = Mock() + + @app.task(pydantic=True) + def foo(arg: int, kwarg: bool = True) -> int: + check(arg, kwarg=kwarg) + return 1 + + assert foo(0) == 1 + check.assert_called_once_with(0, kwarg=True) + + def test_task_with_pydantic_with_pydantic_arg_and_default_kwarg(self): + """Test a pydantic task with pydantic arg/kwarg and return value.""" + + class ArgModel(BaseModel): + arg_value: int + + class KwargModel(BaseModel): + kwarg_value: int + + kwarg_default = KwargModel(kwarg_value=1) + + class ReturnModel(BaseModel): + ret_value: int + + with self.Celery() as app: + check = Mock() + + @app.task(pydantic=True) + def foo(arg: ArgModel, kwarg: KwargModel = kwarg_default) -> ReturnModel: + check(arg, kwarg=kwarg) + return ReturnModel(ret_value=2) + + assert foo({'arg_value': 0}) == {'ret_value': 2} + check.assert_called_once_with(ArgModel(arg_value=0), kwarg=kwarg_default) + check.reset_mock() + + # Explicitly pass kwarg (but as argument) + assert foo({'arg_value': 3}, {'kwarg_value': 4}) 
== {'ret_value': 2}
+            check.assert_called_once_with(ArgModel(arg_value=3), kwarg=KwargModel(kwarg_value=4))
+            check.reset_mock()
+
+            # Explicitly pass all arguments as kwarg
+            assert foo(arg={'arg_value': 5}, kwarg={'kwarg_value': 6}) == {'ret_value': 2}
+            check.assert_called_once_with(ArgModel(arg_value=5), kwarg=KwargModel(kwarg_value=6))
+
+    def test_task_with_pydantic_with_task_name_in_context(self):
+        """Test that the task name is passed as additional context."""
+
+        class ArgModel(BaseModel):
+            value: int
+
+            @model_validator(mode='after')
+            def validate_context(self, info: ValidationInfo):
+                context = info.context
+                assert context
+                assert context.get('celery_task_name') == 't.unit.app.test_app.task'
+                return self
+
+        with self.Celery() as app:
+            check = Mock()
+
+            @app.task(pydantic=True)
+            def task(arg: ArgModel):
+                check(arg)
+                return 1
+
+            assert task({'value': 1}) == 1
+
+    def test_task_with_pydantic_with_strict_validation(self):
+        """Test a pydantic task with/without strict model validation."""
+
+        class ArgModel(BaseModel):
+            value: int
+
+        with self.Celery() as app:
+            check = Mock()
+
+            @app.task(pydantic=True, pydantic_strict=True)
+            def strict(arg: ArgModel):
+                check(arg)
+
+            @app.task(pydantic=True, pydantic_strict=False)
+            def loose(arg: ArgModel):
+                check(arg)
+
+            # In Pydantic, passing an "exact int" as float works without strict validation
+            assert loose({'value': 1.0}) is None
+            check.assert_called_once_with(ArgModel(value=1))
+            check.reset_mock()
+
+            # ... but a float that isn't an exact int will raise an exception
+            with pytest.raises(ValueError):
+                loose({'value': 1.1})
+            check.assert_not_called()
+
+            # ... with strict validation, even an "exact int" will not work:
+            with pytest.raises(ValueError):
+                strict({'value': 1.0})
+            check.assert_not_called()
+
+    def test_task_with_pydantic_with_extra_context(self):
+        """Test passing additional validation context to the model."""
+
+        class ArgModel(BaseModel):
+            value: int
+
+            @model_validator(mode='after')
+            def validate_context(self, info: ValidationInfo):
+                context = info.context
+                assert context, context
+                assert context.get('foo') == 'bar'
+                return self
+
+        with self.Celery() as app:
+            check = Mock()
+
+            @app.task(pydantic=True, pydantic_context={'foo': 'bar'})
+            def task(arg: ArgModel):
+                check(arg.value)
+                return 1
+
+            assert task({'value': 1}) == 1
+            check.assert_called_once_with(1)
+
+    def test_task_with_pydantic_with_dump_kwargs(self):
+        """Test passing keyword arguments to model_dump()."""
+
+        class ArgModel(BaseModel):
+            value: int
+
+        class RetModel(BaseModel):
+            value: datetime
+            unset_value: typing.Optional[int] = 99  # this would be in the output, if exclude_unset weren't True
+
+        with self.Celery() as app:
+            check = Mock()
+
+            @app.task(pydantic=True, pydantic_dump_kwargs={'mode': 'python', 'exclude_unset': True})
+            def task(arg: ArgModel) -> RetModel:
+                check(arg)
+                return RetModel(value=datetime(2024, 5, 14, tzinfo=timezone.utc))
+
+            assert task({'value': 1}) == {'value': datetime(2024, 5, 14, tzinfo=timezone.utc)}
+            check.assert_called_once_with(ArgModel(value=1))
+
+    def test_task_with_pydantic_with_pydantic_not_installed(self):
+        """Test configuring a task with Pydantic when pydantic is not installed."""
+
+        with self.Celery() as app:
+            @app.task(pydantic=True)
+            def task():
+                return
+
+        # mock function will raise ModuleNotFoundError only if pydantic is imported
+        def import_module(name, *args, **kwargs):
+            if name == 'pydantic':
+                raise ModuleNotFoundError('Module not found.')
+            return DEFAULT
+
+        msg = r'^You 
need to install pydantic to use pydantic model serialization\.$' + with patch( + 'celery.app.base.importlib.import_module', + side_effect=import_module, + wraps=importlib.import_module + ): + with pytest.raises(ImproperlyConfigured, match=msg): + task() + def test_task_sets_main_name_MP_MAIN_FILE(self): from celery.utils import imports as _imports _imports.MP_MAIN_FILE = __file__ From 0df0f1f4dd15a545f76e70f02b7799c796e417ab Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 5 Aug 2024 21:25:32 +0300 Subject: [PATCH 0774/1051] [pre-commit.ci] pre-commit autoupdate (#9175) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/PyCQA/flake8: 7.1.0 → 7.1.1](https://github.com/PyCQA/flake8/compare/7.1.0...7.1.1) - [github.com/pre-commit/mirrors-mypy: v1.11.0 → v1.11.1](https://github.com/pre-commit/mirrors-mypy/compare/v1.11.0...v1.11.1) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4544b4d883d..365aae1b0a9 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -6,7 +6,7 @@ repos: args: ["--py38-plus"] - repo: https://github.com/PyCQA/flake8 - rev: 7.1.0 + rev: 7.1.1 hooks: - id: flake8 @@ -30,7 +30,7 @@ repos: - id: isort - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.11.0 + rev: v1.11.1 hooks: - id: mypy pass_filenames: false From eec5172cba77e4644cb2a0d93ef77bae14a84f4f Mon Sep 17 00:00:00 2001 From: peerjakobsen Date: Tue, 6 Aug 2024 13:31:18 +0200 Subject: [PATCH 0775/1051] Allow local dynamodb to be installed on another host than localhost (#8965) Co-authored-by: Asif Saif Uddin Co-authored-by: Tomer Nosrati --- celery/backends/dynamodb.py | 13 +++++++++++-- t/unit/backends/test_dynamodb.py | 11 +++++++---- 2 files changed, 18 insertions(+), 6 deletions(-) diff --git a/celery/backends/dynamodb.py b/celery/backends/dynamodb.py index eee6f18adef..d5159353b00 100644 --- a/celery/backends/dynamodb.py +++ b/celery/backends/dynamodb.py @@ -1,5 +1,6 @@ """AWS DynamoDB result store backend.""" from collections import namedtuple +from ipaddress import ip_address from time import sleep, time from typing import Any, Dict @@ -96,9 +97,9 @@ def __init__(self, url=None, table_name=None, *args, **kwargs): aws_credentials_given = access_key_given - if region == 'localhost': + if region == 'localhost' or DynamoDBBackend._is_valid_ip(region): # We are using the downloadable, local version of DynamoDB - self.endpoint_url = f'http://localhost:{port}' + self.endpoint_url = f'http://{region}:{port}' self.aws_region = 'us-east-1' logger.warning( 'Using local-only DynamoDB endpoint URL: {}'.format( @@ -153,6 +154,14 @@ def __init__(self, url=None, table_name=None, *args, **kwargs): secret_access_key=aws_secret_access_key ) + @staticmethod + def _is_valid_ip(ip): + try: + ip_address(ip) + return True + except ValueError: + return False + def _get_client(self, access_key_id=None, secret_access_key=None): """Get client connection.""" if self._client is None: diff --git a/t/unit/backends/test_dynamodb.py b/t/unit/backends/test_dynamodb.py index c6004e410e6..12520aeeb9f 100644 --- a/t/unit/backends/test_dynamodb.py +++ b/t/unit/backends/test_dynamodb.py @@ -63,23 +63,26 @@ def test_get_client_explicit_endpoint(self): ) assert backend.endpoint_url == 
'http://my.domain.com:666' - def test_get_client_local(self): + @pytest.mark.parametrize("dynamodb_host", [ + 'localhost', '127.0.0.1', + ]) + def test_get_client_local(self, dynamodb_host): table_creation_path = \ 'celery.backends.dynamodb.DynamoDBBackend._get_or_create_table' with patch('boto3.client') as mock_boto_client, \ patch(table_creation_path): backend = DynamoDBBackend( app=self.app, - url='dynamodb://@localhost:8000' + url=f'dynamodb://@{dynamodb_host}:8000' ) client = backend._get_client() assert backend.client is client mock_boto_client.assert_called_once_with( 'dynamodb', - endpoint_url='http://localhost:8000', + endpoint_url=f'http://{dynamodb_host}:8000', region_name='us-east-1' ) - assert backend.endpoint_url == 'http://localhost:8000' + assert backend.endpoint_url == f'http://{dynamodb_host}:8000' def test_get_client_credentials(self): table_creation_path = \ From 6560531827c64bc362460e39fb8b4024eff5e086 Mon Sep 17 00:00:00 2001 From: ZHOU Cheng Date: Tue, 6 Aug 2024 23:13:55 +0800 Subject: [PATCH 0776/1051] Terminate job implementation for gevent concurrency backend (#9083) * terminate job gevent implementation * add unittest * overwrite getcurrent * apply target * wrap target * support other params * set apply_target * support apply_timeout * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * code format * unittest: mock getpid in apply_timeout * fix kill call assert error * unittest: add test_make_killable_target --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Asif Saif Uddin Co-authored-by: Tomer Nosrati Co-authored-by: Omer Katz --- celery/concurrency/gevent.py | 58 ++++++++++++++++++++++++++----- docs/userguide/workers.rst | 2 +- t/unit/concurrency/test_gevent.py | 38 ++++++++++++++++++-- 3 files changed, 86 insertions(+), 12 deletions(-) diff --git a/celery/concurrency/gevent.py b/celery/concurrency/gevent.py index b0ea7e663f3..4855ae6fce2 100644 --- a/celery/concurrency/gevent.py +++ b/celery/concurrency/gevent.py @@ -1,4 +1,5 @@ """Gevent execution pool.""" +import functools from time import monotonic from kombu.asynchronous import timer as _timer @@ -16,15 +17,22 @@ # We cache globals and attribute lookups, so disable this warning. 
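+# Wrapper around base.apply_target for the gevent pool. Instead of taking a
+# fixed pid, it receives a ``getpid`` callable and resolves it at execution
+# time; the pool supplies ``lambda: id(getcurrent())``, so the reported pid is
+# the id of the greenlet running the task, and terminate_job() can later use
+# that same id to look the greenlet up in ``_pool_map``.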
+def apply_target(target, args=(), kwargs=None, callback=None, + accept_callback=None, getpid=None, **_): + kwargs = {} if not kwargs else kwargs + return base.apply_target(target, args, kwargs, callback, accept_callback, + pid=getpid(), **_) + + def apply_timeout(target, args=(), kwargs=None, callback=None, - accept_callback=None, pid=None, timeout=None, + accept_callback=None, getpid=None, timeout=None, timeout_callback=None, Timeout=Timeout, apply_target=base.apply_target, **rest): kwargs = {} if not kwargs else kwargs try: with Timeout(timeout): return apply_target(target, args, kwargs, callback, - accept_callback, pid, + accept_callback, getpid(), propagate=(Timeout,), **rest) except Timeout: return timeout_callback(False, timeout) @@ -82,18 +90,22 @@ class TaskPool(base.BasePool): is_green = True task_join_will_block = False _pool = None + _pool_map = None _quick_put = None def __init__(self, *args, **kwargs): - from gevent import spawn_raw + from gevent import getcurrent, spawn_raw from gevent.pool import Pool self.Pool = Pool + self.getcurrent = getcurrent + self.getpid = lambda: id(getcurrent()) self.spawn_n = spawn_raw self.timeout = kwargs.get('timeout') super().__init__(*args, **kwargs) def on_start(self): self._pool = self.Pool(self.limit) + self._pool_map = {} self._quick_put = self._pool.spawn def on_stop(self): @@ -102,12 +114,14 @@ def on_stop(self): def on_apply(self, target, args=None, kwargs=None, callback=None, accept_callback=None, timeout=None, - timeout_callback=None, apply_target=base.apply_target, **_): + timeout_callback=None, apply_target=apply_target, **_): timeout = self.timeout if timeout is None else timeout - return self._quick_put(apply_timeout if timeout else apply_target, - target, args, kwargs, callback, accept_callback, - timeout=timeout, - timeout_callback=timeout_callback) + target = self._make_killable_target(target) + greenlet = self._quick_put(apply_timeout if timeout else apply_target, + target, args, kwargs, callback, accept_callback, + self.getpid, timeout=timeout, timeout_callback=timeout_callback) + self._add_to_pool_map(id(greenlet), greenlet) + return greenlet def grow(self, n=1): self._pool._semaphore.counter += n @@ -117,6 +131,34 @@ def shrink(self, n=1): self._pool._semaphore.counter -= n self._pool.size -= n + def terminate_job(self, pid, signal=None): + import gevent + + if pid in self._pool_map: + greenlet = self._pool_map[pid] + gevent.kill(greenlet) + @property def num_processes(self): return len(self._pool) + + @staticmethod + def _make_killable_target(target): + def killable_target(*args, **kwargs): + from greenlet import GreenletExit + try: + return target(*args, **kwargs) + except GreenletExit: + return (False, None, None) + + return killable_target + + def _add_to_pool_map(self, pid, greenlet): + self._pool_map[pid] = greenlet + greenlet.link( + functools.partial(self._cleanup_after_job_finish, pid=pid, pool_map=self._pool_map), + ) + + @staticmethod + def _cleanup_after_job_finish(greenlet, pool_map, pid): + del pool_map[pid] diff --git a/docs/userguide/workers.rst b/docs/userguide/workers.rst index cf82c522157..1304a6ad605 100644 --- a/docs/userguide/workers.rst +++ b/docs/userguide/workers.rst @@ -354,7 +354,7 @@ Commands ``revoke``: Revoking tasks -------------------------- -:pool support: all, terminate only supported by prefork and eventlet +:pool support: all, terminate only supported by prefork, eventlet and gevent :broker support: *amqp, redis* :command: :program:`celery -A proj control revoke ` diff --git 
a/t/unit/concurrency/test_gevent.py b/t/unit/concurrency/test_gevent.py index c0b24001d90..7382520e714 100644 --- a/t/unit/concurrency/test_gevent.py +++ b/t/unit/concurrency/test_gevent.py @@ -8,7 +8,6 @@ 'gevent.monkey', 'gevent.pool', 'gevent.signal', - 'greenlet', ) @@ -83,6 +82,38 @@ def test_pool(self): x._pool = [4, 5, 6] assert x.num_processes == 3 + def test_terminate_job(self): + func = Mock() + pool = TaskPool(10) + pool.on_start() + pool.on_apply(func) + + assert len(pool._pool_map.keys()) == 1 + pid = list(pool._pool_map.keys())[0] + greenlet = pool._pool_map[pid] + greenlet.link.assert_called_once() + + pool.terminate_job(pid) + import gevent + + gevent.kill.assert_called_once() + + def test_make_killable_target(self): + def valid_target(): + return "some result..." + + def terminating_target(): + from greenlet import GreenletExit + raise GreenletExit + + assert TaskPool._make_killable_target(valid_target)() == "some result..." + assert TaskPool._make_killable_target(terminating_target)() == (False, None, None) + + def test_cleanup_after_job_finish(self): + testMap = {'1': None} + TaskPool._cleanup_after_job_finish(None, testMap, '1') + assert len(testMap) == 0 + class test_apply_timeout: @@ -102,9 +133,10 @@ def __exit__(self, *exc_info): pass timeout_callback = Mock(name='timeout_callback') apply_target = Mock(name='apply_target') + getpid = Mock(name='getpid') apply_timeout( Mock(), timeout=10, callback=Mock(name='callback'), - timeout_callback=timeout_callback, + timeout_callback=timeout_callback, getpid=getpid, apply_target=apply_target, Timeout=Timeout, ) assert Timeout.value == 10 @@ -113,7 +145,7 @@ def __exit__(self, *exc_info): apply_target.side_effect = Timeout(10) apply_timeout( Mock(), timeout=10, callback=Mock(), - timeout_callback=timeout_callback, + timeout_callback=timeout_callback, getpid=getpid, apply_target=apply_target, Timeout=Timeout, ) timeout_callback.assert_called_with(False, 10) From 498166793338e6b8bb62594a5d41e80252cffb3c Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 6 Aug 2024 18:16:22 +0300 Subject: [PATCH 0777/1051] Bump Kombu to v5.4.0 (#9177) --- requirements/default.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/default.txt b/requirements/default.txt index 182e57a4422..bedec3712cd 100644 --- a/requirements/default.txt +++ b/requirements/default.txt @@ -1,5 +1,5 @@ billiard>=4.2.0,<5.0 -kombu>=5.4.0rc3,<6.0 +kombu>=5.4.0,<6.0 vine>=5.1.0,<6.0 click>=8.1.2,<9.0 click-didyoumean>=0.3.0 From b8f20ec53d335d3c14088e996f63bd22f0534f78 Mon Sep 17 00:00:00 2001 From: Shamil Date: Tue, 6 Aug 2024 22:37:27 +0300 Subject: [PATCH 0778/1051] Add check for soft_time_limit and time_limit values (#9173) * Add check for soft_time_limit and time_limit values * Add tests * Fixed code and tests * Fixed code and tests * Fixed code and tests * Last fix code and tests * Added myself to the list of contributors * Added smoke tests * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --------- Co-authored-by: Tomer Nosrati Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- CONTRIBUTORS.txt | 1 + celery/app/task.py | 5 +++++ t/integration/tasks.py | 5 +++++ t/integration/test_tasks.py | 12 +++++++++++- t/smoke/tasks.py | 10 ++++++++++ t/smoke/tests/test_tasks.py | 24 ++++++++++++++++++------ t/unit/tasks/test_tasks.py | 13 +++++++++++++ 7 files changed, 63 insertions(+), 7 deletions(-) diff --git a/CONTRIBUTORS.txt 
b/CONTRIBUTORS.txt index 9c3534b3358..f6494360eeb 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -299,3 +299,4 @@ Tomer Nosrati, 2022/17/07 Andy Zickler, 2024/01/18 Johannes Faigle, 2024/06/18 Giovanni Giampauli, 2024/06/26 +Shamil Abdulaev, 2024/08/05 diff --git a/celery/app/task.py b/celery/app/task.py index 5d55a747b8c..78624655c4e 100644 --- a/celery/app/task.py +++ b/celery/app/task.py @@ -543,6 +543,8 @@ def apply_async(self, args=None, kwargs=None, task_id=None, producer=None, TypeError: If not enough arguments are passed, or too many arguments are passed. Note that signature checks may be disabled by specifying ``@task(typing=False)``. + ValueError: If soft_time_limit and time_limit are set, + and soft_time_limit is less than time_limit kombu.exceptions.OperationalError: If a connection to the transport cannot be made, or if the connection is lost. @@ -550,6 +552,9 @@ def apply_async(self, args=None, kwargs=None, task_id=None, producer=None, Also supports all keyword arguments supported by :meth:`kombu.Producer.publish`. """ + if self.soft_time_limit and self.time_limit and self.soft_time_limit > self.time_limit: + raise ValueError('soft_time_limit must be greater than or equal to time_limit') + if self.typing: try: check_arguments = self.__header__ diff --git a/t/integration/tasks.py b/t/integration/tasks.py index 752db0278c3..227e3cb2917 100644 --- a/t/integration/tasks.py +++ b/t/integration/tasks.py @@ -512,3 +512,8 @@ def replace_with_stamped_task(self: StampedTaskOnReplace, replace_with=None): if replace_with is None: replace_with = replaced_with_me.s() self.replace(signature(replace_with)) + + +@shared_task(soft_time_limit=2, time_limit=1) +def soft_time_limit_must_exceed_time_limit(): + pass diff --git a/t/integration/test_tasks.py b/t/integration/test_tasks.py index 060176e8b15..c6fc7476687 100644 --- a/t/integration/test_tasks.py +++ b/t/integration/test_tasks.py @@ -18,7 +18,8 @@ from .conftest import TEST_BACKEND, get_active_redis_channels, get_redis_connection from .tasks import (ClassBasedAutoRetryTask, ExpectedException, add, add_ignore_result, add_not_typed, add_pydantic, fail, fail_unpickleable, print_unicode, retry, retry_once, retry_once_headers, - retry_once_priority, retry_unpickleable, return_properties, second_order_replace1, sleeping) + retry_once_priority, retry_unpickleable, return_properties, second_order_replace1, sleeping, + soft_time_limit_must_exceed_time_limit) TIMEOUT = 10 @@ -473,6 +474,15 @@ def test_properties(self, celery_session_worker): res = return_properties.apply_async(app_id="1234") assert res.get(timeout=TIMEOUT)["app_id"] == "1234" + @flaky + def test_soft_time_limit_exceeding_time_limit(self): + + with pytest.raises(ValueError, match='soft_time_limit must be greater than or equal to time_limit'): + result = soft_time_limit_must_exceed_time_limit.apply_async() + result.get(timeout=5) + + assert result.status == 'FAILURE' + class test_trace_log_arguments: args = "CUSTOM ARGS" diff --git a/t/smoke/tasks.py b/t/smoke/tasks.py index 6314dd11865..8250c650bca 100644 --- a/t/smoke/tasks.py +++ b/t/smoke/tasks.py @@ -38,6 +38,16 @@ def long_running_task(seconds: float = 1, verbose: bool = False) -> bool: return True +@shared_task(soft_time_limit=3, time_limit=5) +def soft_time_limit_lower_than_time_limit(): + sleep(4) + + +@shared_task(soft_time_limit=5, time_limit=3) +def soft_time_limit_must_exceed_time_limit(): + pass + + @shared_task(bind=True) def replace_with_task(self: Task, replace_with: Signature = None): if replace_with is 
None: diff --git a/t/smoke/tests/test_tasks.py b/t/smoke/tests/test_tasks.py index e55a4b41f30..1878687ecca 100644 --- a/t/smoke/tests/test_tasks.py +++ b/t/smoke/tests/test_tasks.py @@ -5,10 +5,11 @@ from tenacity import retry, stop_after_attempt, wait_fixed from celery import Celery, signature -from celery.exceptions import TimeLimitExceeded, WorkerLostError +from celery.exceptions import SoftTimeLimitExceeded, TimeLimitExceeded, WorkerLostError from t.integration.tasks import add, identity from t.smoke.conftest import SuiteOperations, TaskTermination -from t.smoke.tasks import replace_with_task +from t.smoke.tasks import (replace_with_task, soft_time_limit_lower_than_time_limit, + soft_time_limit_must_exceed_time_limit) class test_task_termination(SuiteOperations): @@ -54,9 +55,7 @@ def wait_for_two_celery_processes(): filters={"name": "celery"}, ) if len(pinfo_current) != 2: - assert ( - False - ), f"Child process did not respawn with method: {method.name}" + assert False, f"Child process did not respawn with method: {method.name}" wait_for_two_celery_processes() @@ -85,7 +84,7 @@ def wait_for_two_celery_processes(): ( TaskTermination.Method.DELAY_TIMEOUT, "Hard time limit (2s) exceeded for t.smoke.tasks.self_termination_delay_timeout", - 'TimeLimitExceeded(2,)', + "TimeLimitExceeded(2,)", ), ( TaskTermination.Method.EXHAUST_MEMORY, @@ -130,3 +129,16 @@ def test_sanity(self, celery_setup: CeleryTestSetup): c = sig1 | sig2 r = c.apply_async(queue=queues[0]) assert r.get(timeout=RESULT_TIMEOUT) == 42 + + +class test_time_limit: + def test_soft_time_limit_lower_than_time_limit(self, celery_setup: CeleryTestSetup): + sig = soft_time_limit_lower_than_time_limit.s() + result = sig.apply_async(queue=celery_setup.worker.worker_queue) + with pytest.raises(SoftTimeLimitExceeded): + result.get(timeout=RESULT_TIMEOUT) is None + + def test_soft_time_limit_must_exceed_time_limit(self, celery_setup: CeleryTestSetup): + sig = soft_time_limit_must_exceed_time_limit.s() + with pytest.raises(ValueError, match="soft_time_limit must be greater than or equal to time_limit"): + sig.apply_async(queue=celery_setup.worker.worker_queue) diff --git a/t/unit/tasks/test_tasks.py b/t/unit/tasks/test_tasks.py index 10a373ef54b..7d84f108de3 100644 --- a/t/unit/tasks/test_tasks.py +++ b/t/unit/tasks/test_tasks.py @@ -1410,6 +1410,19 @@ def yyy5(self): self.app.send_task = old_send_task + def test_soft_time_limit_failure(self): + @self.app.task(soft_time_limit=5, time_limit=3) + def yyy(): + pass + + try: + yyy_result = yyy.apply_async() + yyy_result.get(timeout=5) + + assert yyy_result.state == 'FAILURE' + except ValueError as e: + assert str(e) == 'soft_time_limit must be greater than or equal to time_limit' + class test_apply_task(TasksCase): From 40dafda3ff49ea082613d975a850a374a6ac161e Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 6 Aug 2024 23:46:29 +0300 Subject: [PATCH 0779/1051] Prepare for (pre) release: v5.5.0b2 (#9178) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Added Changelog for v5.5.0b2 * Bump version: 5.5.0b1 → 5.5.0b2 --- .bumpversion.cfg | 2 +- Changelog.rst | 69 ++++++++++++++++++++++++++++++++++ README.rst | 2 +- celery/__init__.py | 2 +- docs/includes/introduction.txt | 2 +- 5 files changed, 73 insertions(+), 4 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index f6606dff29f..e9e03aeeeaa 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 5.5.0b1 +current_version = 5.5.0b2 
commit = True tag = True parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P[a-z\d]+)? diff --git a/Changelog.rst b/Changelog.rst index ad3a58d3519..cc417b4a7a0 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -8,6 +8,75 @@ This document contains change notes for bugfix & new features in the main branch & 5.5.x series, please see :ref:`whatsnew-5.5` for an overview of what's new in Celery 5.5. +.. _version-5.5.0b2: + +5.5.0b2 +======= + +:release-date: 2024-08-06 +:release-by: Tomer Nosrati + +Celery v5.5.0 Beta 2 is now available for testing. +Please help us test this version and report any issues. + +Key Highlights +~~~~~~~~~~~~~~ + +Pydantic Support +---------------- + +This release introduces support for Pydantic models in Celery tasks. +For more info, see the new pydantic example and PR `#9023 `_ by @mathiasertl. + +After upgrading to this version, please share your feedback on the new Pydantic support. + +Previous Beta Highlights +~~~~~~~~~~~~~~~~~~~~~~~~ + +Redis Broker Stability Improvements +----------------------------------- +The root cause of the Redis broker instability issue has been `identified and resolved `_ +in the v5.4.0 release of Kombu, which should resolve the disconnections bug and offer additional improvements. + +After upgrading to this version, please share your feedback on the Redis broker stability. + +Relevant Issues: +`#7276 `_, +`#8091 `_, +`#8030 `_, +`#8384 `_ + +Quorum Queues Initial Support +----------------------------- +This release introduces the initial support for Quorum Queues with Celery. + +See new configuration options for more details: + +- :setting:`task_default_queue_type` +- :setting:`worker_detect_quorum_queues` + +After upgrading to this version, please share your feedback on the Quorum Queues support. + +Relevant Issues: +`#6067 `_, +`#9121 `_ + +What's Changed +~~~~~~~~~~~~~~ + +- Bump pytest from 8.3.1 to 8.3.2 (#9153) +- Remove setuptools deprecated test command from setup.py (#9159) +- Pin pre-commit to latest version 3.8.0 from Python 3.9 (#9156) +- Bump mypy from 1.11.0 to 1.11.1 (#9164) +- Change "docker-compose" to "docker compose" in Makefile (#9169) +- update python versions and docker compose (#9171) +- Add support for Pydantic model validation/serialization (fixes #8751) (#9023) +- Allow local dynamodb to be installed on another host than localhost (#8965) +- Terminate job implementation for gevent concurrency backend (#9083) +- Bump Kombu to v5.4.0 (#9177) +- Add check for soft_time_limit and time_limit values (#9173) +- Prepare for (pre) release: v5.5.0b2 (#9178) + .. 
_version-5.5.0b1: 5.5.0b1 diff --git a/README.rst b/README.rst index dd033be8c9a..e82bfb88dde 100644 --- a/README.rst +++ b/README.rst @@ -2,7 +2,7 @@ |build-status| |coverage| |license| |wheel| |semgrep| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge| -:Version: 5.5.0b1 (immunity) +:Version: 5.5.0b2 (immunity) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ diff --git a/celery/__init__.py b/celery/__init__.py index 9dec1c0cca8..5df02aa2def 100644 --- a/celery/__init__.py +++ b/celery/__init__.py @@ -17,7 +17,7 @@ SERIES = 'immunity' -__version__ = '5.5.0b1' +__version__ = '5.5.0b2' __author__ = 'Ask Solem' __contact__ = 'auvipy@gmail.com' __homepage__ = 'https://docs.celeryq.dev/' diff --git a/docs/includes/introduction.txt b/docs/includes/introduction.txt index 19cbdd61cb7..6850e0a89f4 100644 --- a/docs/includes/introduction.txt +++ b/docs/includes/introduction.txt @@ -1,4 +1,4 @@ -:Version: 5.5.0b1 (immunity) +:Version: 5.5.0b2 (immunity) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ From 78dbd6204a1a6248facf19c0afd29db1e7a35f17 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 12 Aug 2024 12:59:16 +0300 Subject: [PATCH 0780/1051] Added SQS (localstack) broker to canvas smoke tests (#9179) --- requirements/extras/pytest.txt | 2 +- requirements/test.txt | 2 +- t/smoke/conftest.py | 28 +++++++++++++++++++++++----- t/smoke/tests/test_canvas.py | 13 ++++++++++++- t/smoke/workers/docker/dev | 4 ++-- t/smoke/workers/docker/pypi | 2 +- 6 files changed, 40 insertions(+), 11 deletions(-) diff --git a/requirements/extras/pytest.txt b/requirements/extras/pytest.txt index d559eb3eb16..63ab64727e2 100644 --- a/requirements/extras/pytest.txt +++ b/requirements/extras/pytest.txt @@ -1 +1 @@ -pytest-celery[all]>=1.0.0 +pytest-celery[all]>=1.1.1 diff --git a/requirements/test.txt b/requirements/test.txt index bf569095bdb..1740aa118d8 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,5 +1,5 @@ pytest==8.3.2 -pytest-celery[all]>=1.0.0 +pytest-celery[all]>=1.1.1 pytest-rerunfailures==14.0 pytest-subtests==0.13.1 pytest-timeout==2.3.1 diff --git a/t/smoke/conftest.py b/t/smoke/conftest.py index 6c183a84dcd..073821c61b2 100644 --- a/t/smoke/conftest.py +++ b/t/smoke/conftest.py @@ -1,9 +1,11 @@ import os import pytest -from pytest_celery import REDIS_CONTAINER_TIMEOUT, REDIS_ENV, REDIS_IMAGE, REDIS_PORTS, RedisContainer +from pytest_celery import (LOCALSTACK_CREDS, REDIS_CONTAINER_TIMEOUT, REDIS_ENV, REDIS_IMAGE, REDIS_PORTS, + RedisContainer) from pytest_docker_tools import container, fetch +from celery import Celery from t.smoke.operations.task_termination import TaskTermination from t.smoke.operations.worker_kill import WorkerKill from t.smoke.operations.worker_restart import WorkerRestart @@ -67,8 +69,24 @@ def set_redis_test_container(redis_test_container: RedisContainer): def default_worker_env(default_worker_env: dict, redis_test_container: RedisContainer) -> dict: """Add the Redis connection details to the worker environment.""" # get_redis_connection(): will use these settings when executing tasks in the worker - default_worker_env.update({ - "REDIS_HOST": redis_test_container.hostname, - "REDIS_PORT": 6379, - }) + default_worker_env.update( + { + "REDIS_HOST": redis_test_container.hostname, + "REDIS_PORT": 6379, + **LOCALSTACK_CREDS, + } + ) return default_worker_env + + 
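+# The Localstack-backed SQS broker talks to the AWS APIs through boto3,
+# which reads its (placeholder) credentials from the environment; export
+# them once for the whole test session.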
+@pytest.fixture(scope="session", autouse=True) +def set_aws_credentials(): + os.environ.update(LOCALSTACK_CREDS) + + +@pytest.fixture +def default_worker_app(default_worker_app: Celery) -> Celery: + app = default_worker_app + if app.conf.broker_url and app.conf.broker_url.startswith("sqs"): + app.conf.broker_transport_options["region"] = LOCALSTACK_CREDS["AWS_DEFAULT_REGION"] + return app diff --git a/t/smoke/tests/test_canvas.py b/t/smoke/tests/test_canvas.py index 6590315f024..3e146adf351 100644 --- a/t/smoke/tests/test_canvas.py +++ b/t/smoke/tests/test_canvas.py @@ -1,12 +1,23 @@ import uuid import pytest -from pytest_celery import RESULT_TIMEOUT, CeleryTestSetup +from pytest_celery import (ALL_CELERY_BROKERS, CELERY_LOCALSTACK_BROKER, RESULT_TIMEOUT, CeleryTestBroker, + CeleryTestSetup, _is_vendor_installed) from celery.canvas import chain, chord, group, signature from t.integration.conftest import get_redis_connection from t.integration.tasks import ExpectedException, add, fail, identity, redis_echo +if _is_vendor_installed("localstack"): + ALL_CELERY_BROKERS.add(CELERY_LOCALSTACK_BROKER) + + +@pytest.fixture(params=ALL_CELERY_BROKERS) +def celery_broker(request: pytest.FixtureRequest) -> CeleryTestBroker: # type: ignore + broker: CeleryTestBroker = request.getfixturevalue(request.param) + yield broker + broker.teardown() + class test_signature: def test_sanity(self, celery_setup: CeleryTestSetup): diff --git a/t/smoke/workers/docker/dev b/t/smoke/workers/docker/dev index b932dd4b393..d9e5ee82fef 100644 --- a/t/smoke/workers/docker/dev +++ b/t/smoke/workers/docker/dev @@ -38,8 +38,8 @@ WORKDIR /celery COPY --chown=test_user:test_user . /celery RUN pip install --no-cache-dir --upgrade \ pip \ - -e /celery[redis,pymemcache,pydantic] \ - pytest-celery>=1.0.0 + -e /celery[redis,pymemcache,pydantic,sqs] \ + pytest-celery>=1.1.1 # The workdir must be /app WORKDIR /app diff --git a/t/smoke/workers/docker/pypi b/t/smoke/workers/docker/pypi index 87344cee2ad..a47a2986373 100644 --- a/t/smoke/workers/docker/pypi +++ b/t/smoke/workers/docker/pypi @@ -38,7 +38,7 @@ EXPOSE 5678 RUN pip install --no-cache-dir --upgrade \ pip \ celery[redis,pymemcache]${CELERY_VERSION:+==$CELERY_VERSION} \ - pytest-celery>=1.0.0 \ + pytest-celery[sqs]>=1.1.1 \ pydantic>=2.4 # The workdir must be /app From a1878911ec2ea0accccdfad547b4b74c7ec1c3df Mon Sep 17 00:00:00 2001 From: "pyup.io bot" Date: Mon, 12 Aug 2024 08:38:38 -0700 Subject: [PATCH 0781/1051] Pin elastic-transport to <= latest version 8.15.0 (#9182) * Pin elastic-transport to latest version 8.15.0 * Changed == to <= --------- Co-authored-by: Tomer Nosrati --- requirements/extras/elasticsearch.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/elasticsearch.txt b/requirements/extras/elasticsearch.txt index 0a32eaf08d5..480d937b3d4 100644 --- a/requirements/extras/elasticsearch.txt +++ b/requirements/extras/elasticsearch.txt @@ -1,2 +1,2 @@ elasticsearch<=8.14.0 -elastic-transport<=8.13.1 +elastic-transport<=8.15.0 From 09dc60633093a0d590c702ad09b09f37538f2253 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 14 Aug 2024 14:21:10 +0300 Subject: [PATCH 0782/1051] Update elasticsearch requirement from <=8.14.0 to <=8.15.0 (#9186) Updates the requirements on [elasticsearch](https://github.com/elastic/elasticsearch-py) to permit the latest version. 
- [Release notes](https://github.com/elastic/elasticsearch-py/releases)
- [Commits](https://github.com/elastic/elasticsearch-py/compare/0.4.1...v8.15.0)

---
updated-dependencies:
- dependency-name: elasticsearch
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 requirements/extras/elasticsearch.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements/extras/elasticsearch.txt b/requirements/extras/elasticsearch.txt
index 480d937b3d4..2717d520ff2 100644
--- a/requirements/extras/elasticsearch.txt
+++ b/requirements/extras/elasticsearch.txt
@@ -1,2 +1,2 @@
-elasticsearch<=8.14.0
+elasticsearch<=8.15.0
 elastic-transport<=8.15.0

From 4f43a7c469e2504501ef494e961ae6571b2d2358 Mon Sep 17 00:00:00 2001
From: Bonifacio de Oliveira
Date: Fri, 16 Aug 2024 23:55:19 +0200
Subject: [PATCH 0783/1051] improve formatting (#9188)

---
 docs/userguide/configuration.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst
index 1250f4ff16e..b165d8dd96e 100644
--- a/docs/userguide/configuration.rst
+++ b/docs/userguide/configuration.rst
@@ -685,7 +685,7 @@ Can be one of the following:
     Use `Memcached`_ to store the results.
    See :ref:`conf-cache-result-backend`.
 
-* mongodb
+* ``mongodb``
     Use `MongoDB`_ to store the results.
    See :ref:`conf-mongodb-result-backend`.

From d77b50981cb123052318e45c012e37ab97205cd6 Mon Sep 17 00:00:00 2001
From: Kumar Gaurav Pandey <47948736+necromancerthedark@users.noreply.github.com>
Date: Tue, 20 Aug 2024 13:37:29 +0530
Subject: [PATCH 0784/1051] Add basic helm chart for celery (#9181)

* Add basic helm chart for celery

* Update helm-chart/templates/deployment.yaml

* Update helm-chart/templates/configmap.yaml

* Update helm-chart/templates/secret.yaml

* exclude helm chart from pre-commit since it is not truly yaml

* add readme for helm-chart

---------

Co-authored-by: Kumar Gaurav Pandey
Co-authored-by: Asif Saif Uddin
---
 .pre-commit-config.yaml                  |  1 +
 helm-chart/.helmignore                   | 23 ++++++
 helm-chart/Chart.yaml                    |  6 ++
 helm-chart/README.rst                    | 77 ++++++++++++++++++++
 helm-chart/templates/_helpers.tpl        | 62 ++++++++++++++++
 helm-chart/templates/configmap.yaml      |  8 ++
 helm-chart/templates/deployment.yaml     | 70 ++++++++++++++++++
 helm-chart/templates/secret.yaml         | 13 ++++
 helm-chart/templates/serviceaccount.yaml | 14 ++++
 helm-chart/values.yaml                   | 93 ++++++++++++++++++++++++
 10 files changed, 367 insertions(+)
 create mode 100644 helm-chart/.helmignore
 create mode 100644 helm-chart/Chart.yaml
 create mode 100644 helm-chart/README.rst
 create mode 100644 helm-chart/templates/_helpers.tpl
 create mode 100644 helm-chart/templates/configmap.yaml
 create mode 100644 helm-chart/templates/deployment.yaml
 create mode 100644 helm-chart/templates/secret.yaml
 create mode 100644 helm-chart/templates/serviceaccount.yaml
 create mode 100644 helm-chart/values.yaml

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 365aae1b0a9..25c86c9ab06 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -22,6 +22,7 @@ repos:
       - id: check-merge-conflict
       - id: check-toml
       - id: check-yaml
+        exclude: helm-chart/templates/
       - id: mixed-line-ending
 
   - repo: https://github.com/pycqa/isort

diff --git a/helm-chart/.helmignore b/helm-chart/.helmignore
new file mode 100644
index 00000000000..0e8a0eb36f4
--- /dev/null
+++ b/helm-chart/.helmignore
@@ -0,0 +1,23 @@
+# Patterns to ignore when building
packages.
+# This supports shell glob matching, relative path matching, and
+# negation (prefixed with !). Only one pattern per line.
+.DS_Store
+# Common VCS dirs
+.git/
+.gitignore
+.bzr/
+.bzrignore
+.hg/
+.hgignore
+.svn/
+# Common backup files
+*.swp
+*.bak
+*.tmp
+*.orig
+*~
+# Various IDEs
+.project
+.idea/
+*.tmproj
+.vscode/
diff --git a/helm-chart/Chart.yaml b/helm-chart/Chart.yaml
new file mode 100644
index 00000000000..5f96f212b28
--- /dev/null
+++ b/helm-chart/Chart.yaml
@@ -0,0 +1,6 @@
+apiVersion: v2
+name: celery
+description: A Helm chart for Celery
+type: application
+version: 0.1.0
+appVersion: "1.16.0"
diff --git a/helm-chart/README.rst b/helm-chart/README.rst
new file mode 100644
index 00000000000..93a5adc2285
--- /dev/null
+++ b/helm-chart/README.rst
@@ -0,0 +1,77 @@
+Helm Chart for Celery
+=====================
+
+This Helm chart can be used for deploying Celery locally or on a Kubernetes cluster.
+
+It contains the following main folders/files:
+
+::
+
+    helm-chart
+    ├── Chart.yaml
+    ├── README.rst
+    ├── templates
+    │   ├── _helpers.tpl
+    │   ├── configmap.yaml
+    │   ├── deployment.yaml
+    │   ├── secret.yaml
+    │   └── serviceaccount.yaml
+    └── values.yaml
+
+The most important file here will be ``values.yaml``.
+This will be used for setting/altering parameters; most of the parameters are annotated inside ``values.yaml`` with comments.
+
+Deploying on a Cluster:
+-----------------------
+
+If you want to set up and test locally, check out: `setting up on local`_
+
+To install on a Kubernetes cluster, run the following command from the root of the project:
+
+::
+
+    helm install celery helm-chart/
+
+You can also set up environment-specific value files, for example ``values_dev.yaml`` for the ``dev`` env;
+then you can use the following command to override the current ``values.yaml`` file's parameters with environment-specific ones:
+
+::
+
+    helm install celery helm-chart/ --values helm-chart/values_dev.yaml
+
+To upgrade an existing installation of the chart you can use:
+
+::
+
+    helm upgrade --install celery helm-chart/
+
+    or
+
+    helm upgrade --install celery helm-chart/ --values helm-chart/values_dev.yaml
+
+
+You can uninstall the chart using helm:
+
+::
+
+    helm uninstall celery
+
+.. _setting up on local:
+
+Setting up on local:
+--------------------
+To set up a Kubernetes cluster locally, use one of the following:
+
+- k3d_
+- `Colima (recommended if you are on MacOS)`_
+
+.. _`k3d`: https://k3d.io/v5.7.3/
+.. _`Colima (recommended if you are on MacOS)`: https://github.com/abiosoft/colima?tab=readme-ov-file#kubernetes
+
+You will also need the following tools:
+
+- `helm cli`_
+- `kubectl`_
+
+.. _helm cli: https://helm.sh/docs/intro/install/
+.. _kubectl: https://kubernetes.io/docs/tasks/tools/
diff --git a/helm-chart/templates/_helpers.tpl b/helm-chart/templates/_helpers.tpl
new file mode 100644
index 00000000000..7fc608d69ed
--- /dev/null
+++ b/helm-chart/templates/_helpers.tpl
@@ -0,0 +1,62 @@
+{{/*
+Expand the name of the chart.
+*/}}
+{{- define "..name" -}}
+{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }}
+{{- end }}
+
+{{/*
+Create a default fully qualified app name.
+We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
+If release name contains chart name it will be used as a full name.
+*/}} +{{- define "..fullname" -}} +{{- if .Values.fullnameOverride }} +{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }} +{{- else }} +{{- $name := default .Chart.Name .Values.nameOverride }} +{{- if contains $name .Release.Name }} +{{- .Release.Name | trunc 63 | trimSuffix "-" }} +{{- else }} +{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }} +{{- end }} +{{- end }} +{{- end }} + +{{/* +Create chart name and version as used by the chart label. +*/}} +{{- define "..chart" -}} +{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }} +{{- end }} + +{{/* +Common labels +*/}} +{{- define "..labels" -}} +helm.sh/chart: {{ include "..chart" . }} +{{ include "..selectorLabels" . }} +{{- if .Chart.AppVersion }} +app.kubernetes.io/version: {{ .Chart.AppVersion | quote }} +{{- end }} +app.kubernetes.io/managed-by: {{ .Release.Service }} +{{- end }} + +{{/* +Selector labels +*/}} +{{- define "..selectorLabels" -}} +app.kubernetes.io/name: {{ include "..name" . }} +app.kubernetes.io/instance: {{ .Release.Name }} +{{- end }} + +{{/* +Create the name of the service account to use +*/}} +{{- define "..serviceAccountName" -}} +{{- if .Values.serviceAccount.create }} +{{- default (include "..fullname" .) .Values.serviceAccount.name }} +{{- else }} +{{- default "default" .Values.serviceAccount.name }} +{{- end }} +{{- end }} diff --git a/helm-chart/templates/configmap.yaml b/helm-chart/templates/configmap.yaml new file mode 100644 index 00000000000..a762821f9ae --- /dev/null +++ b/helm-chart/templates/configmap.yaml @@ -0,0 +1,8 @@ +apiVersion: v1 +kind: ConfigMap +metadata: + name: {{ .Values.configmap.name }} + labels: + app: {{ include "..fullname" . }} +data: +{{- .Values.configmap.data | toYaml | nindent 2 }} diff --git a/helm-chart/templates/deployment.yaml b/helm-chart/templates/deployment.yaml new file mode 100644 index 00000000000..95e1f75004c --- /dev/null +++ b/helm-chart/templates/deployment.yaml @@ -0,0 +1,70 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ include "..fullname" . }} + labels: + app: {{ include "..name" . }} + annotations: + checksum/config: {{ include (print $.Template.BasePath "/configmap.yaml") . | sha256sum }} +spec: + replicas: {{ .Values.replicaCount }} + revisionHistoryLimit: 2 + selector: + matchLabels: + app: {{ include "..name" . }} + template: + metadata: + {{- with .Values.podAnnotations }} + annotations: + {{- toYaml . | nindent 8 }} + {{- end }} + labels: + app: {{ include "..name" . }} + spec: + {{- with .Values.imagePullSecrets }} + imagePullSecrets: + {{- toYaml . | nindent 8 }} + {{- end }} + serviceAccountName: {{ include "..serviceAccountName" . }} + securityContext: + {{- toYaml .Values.podSecurityContext | nindent 8 }} + containers: + - name: {{ include "..fullname" . }} + securityContext: + {{- toYaml .Values.securityContext | nindent 12 }} + image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}" + imagePullPolicy: {{ .Values.image.pullPolicy }} + envFrom: + - configMapRef: + name: {{ include "..fullname" . }} + {{- if .Values.secrets.enabled }} + - secretRef: + name: {{ include "..fullname" . }} + {{- end }} + livenessProbe: + {{- toYaml .Values.livenessProbe | nindent 12 }} + readinessProbe: + {{- toYaml .Values.readinessProbe | nindent 12 }} + resources: + {{- toYaml .Values.resources | nindent 12 }} + {{- with .Values.volumeMounts }} + volumeMounts: + {{- toYaml . 
| nindent 12 }} + {{- end }} + {{- with .Values.nodeSelector }} + nodeSelector: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with .Values.volumes }} + volumes: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with .Values.affinity }} + affinity: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with .Values.tolerations }} + tolerations: + {{- toYaml . | nindent 8 }} + {{- end }} + \ No newline at end of file diff --git a/helm-chart/templates/secret.yaml b/helm-chart/templates/secret.yaml new file mode 100644 index 00000000000..b084a02a626 --- /dev/null +++ b/helm-chart/templates/secret.yaml @@ -0,0 +1,13 @@ +{{- if .Values.secrets.enabled }} +apiVersion: v1 +kind: Secret +metadata: + name: {{ .Values.secrets.name }} + labels: + app: {{ include "..fullname" . }} +type: Opaque +data: + {{- range $key, $value := .Values.secrets.data }} + {{ $key }}: {{ $value | quote }} + {{- end }} +{{- end }} diff --git a/helm-chart/templates/serviceaccount.yaml b/helm-chart/templates/serviceaccount.yaml new file mode 100644 index 00000000000..81619eab0eb --- /dev/null +++ b/helm-chart/templates/serviceaccount.yaml @@ -0,0 +1,14 @@ +{{- if .Values.serviceAccount.create -}} +apiVersion: v1 +kind: ServiceAccount +metadata: + name: {{ include "..serviceAccountName" . }} + namespace: {{- .Values.namespace -}} + labels: + {{- include "..labels" . | nindent 4 }} + {{- with .Values.serviceAccount.annotations }} + annotations: + {{- toYaml . | nindent 4 }} + {{- end }} +automountServiceAccountToken: {{ .Values.serviceAccount.automount }} +{{- end }} diff --git a/helm-chart/values.yaml b/helm-chart/values.yaml new file mode 100644 index 00000000000..59da2e9b14d --- /dev/null +++ b/helm-chart/values.yaml @@ -0,0 +1,93 @@ +replicaCount: 4 + +image: + repository: "celery/celery" + pullPolicy: IfNotPresent + # Overrides the image tag whose default is the chart appVersion. + tag: "dev" + + +namespace: "celery" +imagePullSecrets: [] +nameOverride: "celery" +fullnameOverride: "celery" + +serviceAccount: + # Specifies whether a service account should be created + create: true + # Automatically mount a ServiceAccount's API credentials? + automount: true + # Annotations to add to the service account + annotations: {} + # The name of the service account to use. + # If not set and create is true, a name is generated using the fullname template + name: "celery" + + +secrets: + enabled: false + name: celery + data: {} + +podAnnotations: {} +podLabels: {} + +podSecurityContext: {} + # fsGroup: 2000 + +securityContext: {} + # capabilities: + # drop: + # - ALL + # readOnlyRootFilesystem: true + # runAsNonRoot: true + # runAsUser: 1000 + +service: + type: ClusterIP + port: 80 + +resources: {} + +## Do not change liveness and readiness probe unless you are absolutely certain +livenessProbe: + exec: + command: [ + "/usr/local/bin/python3", + "-c", + "\"import os;from celery.task.control import inspect;from import celery_app;exit(0 if os.environ['HOSTNAME'] in ','.join(inspect(app=celery_app).stats().keys()) else 1)\"" + ] + +readinessProbe: + exec: + command: [ + "/usr/local/bin/python3", + "-c", + "\"import os;from celery.task.control import inspect;from import celery_app;exit(0 if os.environ['HOSTNAME'] in ','.join(inspect(app=celery_app).stats().keys()) else 1)\"" + ] + +# You can add env variables needed for celery +configmap: + name: "celery" + data: + CELERY_BROKER_URL: "" + +# Additional volumes on the output Deployment definition. 
+volumes: [] +# - name: foo +# secret: +# secretName: mysecret +# optional: false + +# Additional volumeMounts on the output Deployment definition. +volumeMounts: [] +# - name: foo +# mountPath: "/etc/foo" +# readOnly: true + +nodeSelector: {} + +tolerations: [] + +affinity: {} + From b1d906f62c9254c9755f34ccd2103412958e38a1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=D0=9B=D0=BE=D0=BA=D0=BE=D1=82=D0=BE=D0=BA?= <47851014+lokot0k@users.noreply.github.com> Date: Wed, 21 Aug 2024 23:53:57 +0300 Subject: [PATCH 0785/1051] Update kafka.rst (#9194) Fixed wrong code example --- .../backends-and-brokers/kafka.rst | 26 ++++++++++--------- 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/docs/getting-started/backends-and-brokers/kafka.rst b/docs/getting-started/backends-and-brokers/kafka.rst index ab0627fd384..e5b0ea0b68e 100644 --- a/docs/getting-started/backends-and-brokers/kafka.rst +++ b/docs/getting-started/backends-and-brokers/kafka.rst @@ -31,18 +31,20 @@ For celeryconfig.py: sasl_username = os.environ["SASL_USERNAME"] sasl_password = os.environ["SASL_PASSWORD"] broker_url = f"confluentkafka://{sasl_username}:{sasl_password}@broker:9094" - kafka_admin_config = { - "sasl.username": sasl_username, - "sasl.password": sasl_password, - } - kafka_common_config = { - "sasl.username": sasl_username, - "sasl.password": sasl_password, - "security.protocol": "SASL_SSL", - "sasl.mechanism": "SCRAM-SHA-512", - "bootstrap_servers": "broker:9094", - } - + broker_transport_options.update({ + "kafka_admin_config": { + "sasl.username": sasl_username, + "sasl.password": sasl_password, + }, + "kafka_common_config": { + "sasl.username": sasl_username, + "sasl.password": sasl_password, + "security.protocol": "SASL_SSL", + "sasl.mechanism": "SCRAM-SHA-512", + "bootstrap_servers": "broker:9094", + } + }) + Please note that "allow_create_topics" is needed if the topic does not exist yet but is not necessary otherwise. 
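
For illustration, a minimal application that pulls in the ``celeryconfig.py``
shown above could look like the following sketch (the ``tasks`` module name
and the ``add`` task are illustrative assumptions):

.. code-block:: python

    # tasks.py -- assumed example module; loads the Kafka broker settings
    # from the celeryconfig.py documented above.
    from celery import Celery

    app = Celery("tasks")
    app.config_from_object("celeryconfig")

    @app.task
    def add(x, y):
        return x + y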
From 78ac69cfed7c485ba32726deaeaf6401d5e7bc1f Mon Sep 17 00:00:00 2001 From: "pyup.io bot" Date: Thu, 22 Aug 2024 07:51:42 -0700 Subject: [PATCH 0786/1051] Update pytest-order from 1.2.1 to 1.3.0 (#9198) --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index 1740aa118d8..5e1d9f28aac 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -4,7 +4,7 @@ pytest-rerunfailures==14.0 pytest-subtests==0.13.1 pytest-timeout==2.3.1 pytest-click==1.1.0 -pytest-order==1.2.1 +pytest-order==1.3.0 boto3>=1.26.143 moto>=4.1.11,<5.1.0 # typing extensions From b1061a0f8eddd7df2c321d3ccad532b35f31992a Mon Sep 17 00:00:00 2001 From: "pyup.io bot" Date: Sat, 24 Aug 2024 21:37:02 -0700 Subject: [PATCH 0787/1051] Update mypy from 1.11.1 to 1.11.2 (#9206) --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index 5e1d9f28aac..c21c462d77b 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -8,7 +8,7 @@ pytest-order==1.3.0 boto3>=1.26.143 moto>=4.1.11,<5.1.0 # typing extensions -mypy==1.11.1; platform_python_implementation=="CPython" +mypy==1.11.2; platform_python_implementation=="CPython" pre-commit>=3.5.0,<3.6.0; python_version < '3.9' pre-commit>=3.8.0; python_version >= '3.9' -r extras/yaml.txt From 7073a6856c2fc6c88364ea7ce73da7593737ca7f Mon Sep 17 00:00:00 2001 From: dhruvji Date: Fri, 23 Aug 2024 08:42:54 +0000 Subject: [PATCH 0788/1051] all added to routes --- celery/app/routes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/celery/app/routes.py b/celery/app/routes.py index a56ce59e00b..bed2c07a51f 100644 --- a/celery/app/routes.py +++ b/celery/app/routes.py @@ -20,7 +20,7 @@ # for support Python 3.7 Pattern = re.Pattern -__all__ = ('MapRoute', 'Router', 'prepare') +__all__ = ('MapRoute', 'Router', 'expand_router_string', 'prepare') class MapRoute: From 23d5f96b44cebc16466475057b761cc652d12332 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 26 Aug 2024 20:02:58 +0300 Subject: [PATCH 0789/1051] [pre-commit.ci] pre-commit autoupdate (#9209) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/mirrors-mypy: v1.11.1 → v1.11.2](https://github.com/pre-commit/mirrors-mypy/compare/v1.11.1...v1.11.2) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 25c86c9ab06..28bc1455dee 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -31,7 +31,7 @@ repos: - id: isort - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.11.1 + rev: v1.11.2 hooks: - id: mypy pass_filenames: false From 15a63eac5ec22764a8e03a7c254ac2d601bd82b6 Mon Sep 17 00:00:00 2001 From: Christian Clauss Date: Tue, 27 Aug 2024 16:17:37 +0200 Subject: [PATCH 0790/1051] Fix typos discovered by codespell --- .codespellrc | 4 ---- .pre-commit-config.yaml | 8 ++++++++ celery/app/task.py | 2 +- celery/bin/base.py | 2 +- celery/utils/log.py | 2 +- celery/utils/saferepr.py | 2 +- docs/userguide/configuration.rst | 2 +- extra/generic-init.d/celerybeat | 2 +- pyproject.toml | 4 ++++ 9 files changed, 18 insertions(+), 10 deletions(-) delete mode 100644 .codespellrc diff --git a/.codespellrc 
b/.codespellrc deleted file mode 100644 index e35a7588699..00000000000 --- a/.codespellrc +++ /dev/null @@ -1,4 +0,0 @@ -[codespell] -skip = .git,.venv,*.svg,package-lock.json,*.key -# Some names and timezone (lower cased) -ignore-words-list = gool,markey,sherif,wil,ist,fromm,brane,bu,nott diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 28bc1455dee..b33e778a75c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -16,6 +16,14 @@ repos: - id: yesqa exclude: ^celery/app/task\.py$|^celery/backends/cache\.py$ + - repo: https://github.com/codespell-project/codespell + rev: v2.3.0 + hooks: + - id: codespell # See pyproject.toml for args + args: [--toml, pyproject.toml] + additional_dependencies: + - tomli + - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.6.0 hooks: diff --git a/celery/app/task.py b/celery/app/task.py index 78624655c4e..033e5661233 100644 --- a/celery/app/task.py +++ b/celery/app/task.py @@ -466,7 +466,7 @@ def apply_async(self, args=None, kwargs=None, task_id=None, producer=None, shadow (str): Override task name used in logs/monitoring. Default is retrieved from :meth:`shadow_name`. - connection (kombu.Connection): Re-use existing broker connection + connection (kombu.Connection): Reuse existing broker connection instead of acquiring one from the connection pool. retry (bool): If enabled sending of the task message will be diff --git a/celery/bin/base.py b/celery/bin/base.py index 57158a27e06..073b86a7e91 100644 --- a/celery/bin/base.py +++ b/celery/bin/base.py @@ -181,7 +181,7 @@ def __init__(self, *args, **kwargs): def daemon_setting(self, ctx: Context, opt: CeleryOption, value: Any) -> Any: """ - Try to fetch deamonization option from applications settings. + Try to fetch daemonization option from applications settings. Use the daemon command name as prefix (eg. `worker` -> `worker_pidfile`) """ return value or getattr(ctx.obj.app.conf, f"{ctx.command.name}_{self.name}", None) diff --git a/celery/utils/log.py b/celery/utils/log.py index 4e8fc11ff72..f67a3dd700c 100644 --- a/celery/utils/log.py +++ b/celery/utils/log.py @@ -37,7 +37,7 @@ def set_in_sighandler(value): - """Set flag signifiying that we're inside a signal handler.""" + """Set flag signifying that we're inside a signal handler.""" global _in_sighandler _in_sighandler = value diff --git a/celery/utils/saferepr.py b/celery/utils/saferepr.py index 68768882fc7..9b37bc92ed1 100644 --- a/celery/utils/saferepr.py +++ b/celery/utils/saferepr.py @@ -41,7 +41,7 @@ #: Recursion protection. _dirty = namedtuple('_dirty', ('objid',)) -#: Types that are repsented as chars. +#: Types that are represented as chars. chars_t = (bytes, str) #: Types that are regarded as safe to call repr on. diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst index b165d8dd96e..5d7ed9c8b07 100644 --- a/docs/userguide/configuration.rst +++ b/docs/userguide/configuration.rst @@ -1578,7 +1578,7 @@ Example configuration (Astra DB) Additional configuration ~~~~~~~~~~~~~~~~~~~~~~~~ -The Cassandra driver, when estabilishing the connection, undergoes a stage +The Cassandra driver, when establishing the connection, undergoes a stage of negotiating the protocol version with the server(s). 
Similarly, a load-balancing policy is automatically supplied (by default ``DCAwareRoundRobinPolicy``, which in turn has a ``local_dc`` setting, also diff --git a/extra/generic-init.d/celerybeat b/extra/generic-init.d/celerybeat index 8007a2d1325..b554844d2f9 100755 --- a/extra/generic-init.d/celerybeat +++ b/extra/generic-init.d/celerybeat @@ -57,7 +57,7 @@ _config_sanity() { echo echo "Resolution:" echo "Review the file carefully, and make sure it hasn't been " - echo "modified with mailicious intent. When sure the " + echo "modified with malicious intent. When sure the " echo "script is safe to execute with superuser privileges " echo "you can change ownership of the script:" echo " $ sudo chown root '$path'" diff --git a/pyproject.toml b/pyproject.toml index e4d3f6fd838..dae3f95465b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,6 +24,10 @@ files = [ "celery/apps/beat.py", ] +[tool.codespell] +ignore-words-list = "assertin" +skip = "./.*,docs/AUTHORS.txt,docs/history/*,docs/spelling_wordlist.txt,Changelog.rst,CONTRIBUTORS.txt,*.key" + [tool.coverage.run] branch = true cover_pylib = false From b1d55f62abc2ae48b0c191eb0c52a46a7cba66bc Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Wed, 28 Aug 2024 17:58:30 +0600 Subject: [PATCH 0791/1051] Use tzdata extras with zoneinfo backports (#8286) --- requirements/default.txt | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/requirements/default.txt b/requirements/default.txt index bedec3712cd..2ce13715227 100644 --- a/requirements/default.txt +++ b/requirements/default.txt @@ -6,6 +6,5 @@ click-didyoumean>=0.3.0 click-repl>=0.2.0 click-plugins>=1.1.1 importlib-metadata>=3.6; python_version < '3.8' -backports.zoneinfo>=0.2.1; python_version < '3.9' -tzdata>=2022.7 +backports.zoneinfo[tzdata]>=0.2.1; python_version < '3.9' python-dateutil>=2.8.2 From 63bd643219372167ad3c2b09e1b5620afdd61d5b Mon Sep 17 00:00:00 2001 From: KeisukeYamashita <19yamashita15@gmail.com> Date: Sun, 1 Sep 2024 00:42:05 +0200 Subject: [PATCH 0792/1051] Use `docker compose` in Contributing's doc build section (#9219) Signed-off-by: KeisukeYamashita <19yamashita15@gmail.com> --- CONTRIBUTING.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 2a2d239320d..ef6b4ba90a4 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -718,7 +718,7 @@ Build the documentation by running: .. code-block:: console - $ docker-compose -f docker/docker-compose.yml up --build docs + $ docker compose -f docker/docker-compose.yml up --build docs The service will start a local docs server at ``:7000``. 
The server is using ``sphinx-autobuild`` with the ``--watch`` option enabled, so you can live From f96a431df5d534620cef7aff3a309908d756964c Mon Sep 17 00:00:00 2001 From: Marius Gedminas Date: Sun, 1 Sep 2024 14:51:02 +0300 Subject: [PATCH 0793/1051] Failing test for issue #9119 (#9215) * Add a failing test case for #9119 * Mark the test as xfail --- t/unit/utils/test_dispatcher.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/t/unit/utils/test_dispatcher.py b/t/unit/utils/test_dispatcher.py index b100b68b800..07ee2216dc9 100644 --- a/t/unit/utils/test_dispatcher.py +++ b/t/unit/utils/test_dispatcher.py @@ -2,6 +2,8 @@ import sys import time +import pytest + from celery.utils.dispatch import Signal if sys.platform.startswith('java'): @@ -182,3 +184,17 @@ def test_boundmethod(self): del a, result, expected garbage_collect() self._testIsClean(a_signal) + + @pytest.mark.xfail(reason="Issue #9119") + def test_disconnect_retryable_decorator(self): + # Regression test for https://github.com/celery/celery/issues/9119 + + @a_signal.connect(sender=self, retry=True) + def succeeds_eventually(val, **kwargs): + return val + + try: + a_signal.send(sender=self, val='test') + finally: + a_signal.disconnect(succeeds_eventually, sender=self) + self._testIsClean(a_signal) From f436f10303b25291c43329704d98a906095f08b8 Mon Sep 17 00:00:00 2001 From: Gwangho Kim Date: Sun, 1 Sep 2024 22:42:53 +0900 Subject: [PATCH 0794/1051] Fix date_done timezone issue (#8385) * add fixed suggestion * [Fix #4842] Fix to use celery_taskmeta.date_done as app.conf.timezone setting * [Fix #4842] delete unused module(datetime) * refac: remove unused import * fix mock from datetime to celery.now * fix change the mocking target * fix: Change the test method for date_done * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * Update t/unit/tasks/test_result.py Co-authored-by: Christian Clauss * Update t/unit/tasks/test_result.py Co-authored-by: Christian Clauss * fix: Change from mocking to cache removal approach * refac: clean package and upper utc --------- Co-authored-by: codesik Co-authored-by: Asif Saif Uddin Co-authored-by: Tomer Nosrati Co-authored-by: Omer Katz Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Christian Clauss Co-authored-by: Omer Katz --- celery/backends/base.py | 4 ++-- t/unit/backends/test_elasticsearch.py | 8 ++++---- t/unit/tasks/test_result.py | 25 ++++++++++++++----------- 3 files changed, 20 insertions(+), 17 deletions(-) diff --git a/celery/backends/base.py b/celery/backends/base.py index 3a29f1e9996..dc79f4ebd73 100644 --- a/celery/backends/base.py +++ b/celery/backends/base.py @@ -9,7 +9,7 @@ import time import warnings from collections import namedtuple -from datetime import datetime, timedelta, timezone +from datetime import timedelta from functools import partial from weakref import WeakValueDictionary @@ -460,7 +460,7 @@ def _get_result_meta(self, result, state, traceback, request, format_date=True, encode=False): if state in self.READY_STATES: - date_done = datetime.now(timezone.utc) + date_done = self.app.now() if format_date: date_done = date_done.isoformat() else: diff --git a/t/unit/backends/test_elasticsearch.py b/t/unit/backends/test_elasticsearch.py index a465cbcf501..13e72833ec1 100644 --- a/t/unit/backends/test_elasticsearch.py +++ b/t/unit/backends/test_elasticsearch.py @@ -388,7 +388,7 @@ def test_index_conflict_with_existing_ready_state(self, 
datetime_mock): x._server.update.assert_not_called() @patch('celery.backends.elasticsearch.datetime') - @patch('celery.backends.base.datetime') + @patch('celery.app.base.datetime') def test_backend_concurrent_update(self, base_datetime_mock, es_datetime_mock): expected_dt = datetime(2020, 6, 1, 18, 43, 24, 123456, timezone.utc) es_datetime_mock.now.return_value = expected_dt @@ -499,7 +499,7 @@ def test_backend_concurrent_update(self, base_datetime_mock, es_datetime_mock): self.app.conf.result_backend_always_retry = prev @patch('celery.backends.elasticsearch.datetime') - @patch('celery.backends.base.datetime') + @patch('celery.app.base.datetime') def test_backend_index_conflicting_document_removed(self, base_datetime_mock, es_datetime_mock): expected_dt = datetime(2020, 6, 1, 18, 43, 24, 123456, timezone.utc) es_datetime_mock.now.return_value = expected_dt @@ -570,7 +570,7 @@ def test_backend_index_conflicting_document_removed(self, base_datetime_mock, es self.app.conf.result_backend_always_retry = prev @patch('celery.backends.elasticsearch.datetime') - @patch('celery.backends.base.datetime') + @patch('celery.app.base.datetime') def test_backend_index_conflicting_document_removed_not_throwing(self, base_datetime_mock, es_datetime_mock): expected_dt = datetime(2020, 6, 1, 18, 43, 24, 123456, timezone.utc) es_datetime_mock.now.return_value = expected_dt @@ -638,7 +638,7 @@ def test_backend_index_conflicting_document_removed_not_throwing(self, base_date self.app.conf.result_backend_always_retry = prev @patch('celery.backends.elasticsearch.datetime') - @patch('celery.backends.base.datetime') + @patch('celery.app.base.datetime') def test_backend_index_corrupted_conflicting_document(self, base_datetime_mock, es_datetime_mock): expected_dt = datetime(2020, 6, 1, 18, 43, 24, 123456, timezone.utc) es_datetime_mock.now.return_value = expected_dt diff --git a/t/unit/tasks/test_result.py b/t/unit/tasks/test_result.py index 1f7f7e08ccf..062c0695427 100644 --- a/t/unit/tasks/test_result.py +++ b/t/unit/tasks/test_result.py @@ -9,7 +9,7 @@ from celery import states, uuid from celery.app.task import Context -from celery.backends.base import SyncBackendMixin +from celery.backends.base import Backend, SyncBackendMixin from celery.exceptions import ImproperlyConfigured, IncompleteStream, TimeoutError from celery.result import AsyncResult, EagerResult, GroupResult, ResultSet, assert_will_not_block, result_from_tuple from celery.utils.serialization import pickle @@ -434,17 +434,20 @@ def test_get_request_meta(self): result = self.app.AsyncResult(self.task4['id']) assert result.date_done is None - @pytest.mark.parametrize('result_dict, date', [ - ({'date_done': None}, None), - ({'date_done': '1991-10-05T05:41:06'}, - datetime.datetime(1991, 10, 5, 5, 41, 6)), - ({'date_done': datetime.datetime(1991, 10, 5, 5, 41, 6)}, - datetime.datetime(1991, 10, 5, 5, 41, 6)) + @patch('celery.app.base.to_utc') + @pytest.mark.parametrize('timezone, date', [ + ("UTC", "2024-08-24T00:00:00+00:00"), + ("America/Los_Angeles", "2024-08-23T17:00:00-07:00"), + ("Pacific/Kwajalein", "2024-08-24T12:00:00+12:00"), + ("Europe/Berlin", "2024-08-24T02:00:00+02:00"), ]) - def test_date_done(self, result_dict, date): - result = self.app.AsyncResult(uuid()) - result._cache = result_dict - assert result.date_done == date + def test_date_done(self, utc_datetime_mock, timezone, date): + utc_datetime_mock.return_value = datetime.datetime(2024, 8, 24, 0, 0, 0, 0, datetime.timezone.utc) + self.app.conf.timezone = timezone + del self.app.timezone # 
reset cached timezone + + result = Backend(app=self.app)._get_result_meta(None, states.SUCCESS, None, None) + assert result.get('date_done') == date class test_ResultSet: From 65e17c815c0c50ca81146ec2482d0da0e3589eb9 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sun, 1 Sep 2024 21:29:25 +0300 Subject: [PATCH 0795/1051] CI Fixes to smoke tests (#9223) * Added Smoke-quorum_queues to CI * Split smoke tests CI runs to groups to improve stability --- .github/workflows/python-package.yml | 65 ++++++++++++++++++++++------ 1 file changed, 51 insertions(+), 14 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 143180828fb..8e3bc6cc912 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -162,6 +162,43 @@ jobs: tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k failover + Smoke-quorum_queues: + needs: + - Integration + if: needs.Integration.result == 'success' + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + + steps: + - name: Fetch Docker Images + run: | + docker pull redis:latest + docker pull rabbitmq:latest + + - name: Install apt packages + run: | + sudo apt update + + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + cache: 'pip' + cache-dependency-path: '**/setup.py' + - name: Install tox + run: python -m pip install --upgrade pip tox tox-gh-actions + - name: > + Run tox for + "${{ matrix.python-version }}-smoke" + timeout-minutes: 20 + run: > + tox --verbose --verbose -e + "${{ matrix.python-version }}-smoke" -- -n auto -k quorum_queues + Smoke-stamping: needs: - Integration @@ -201,8 +238,8 @@ jobs: Smoke-canvas: needs: - - Integration - if: needs.Integration.result == 'success' + - Smoke-stamping + if: needs.Smoke-stamping.result == 'success' runs-on: ubuntu-latest strategy: fail-fast: false @@ -238,8 +275,8 @@ jobs: Smoke-consumer: needs: - - Integration - if: needs.Integration.result == 'success' + - Smoke-stamping + if: needs.Smoke-stamping.result == 'success' runs-on: ubuntu-latest strategy: fail-fast: false @@ -275,8 +312,8 @@ jobs: Smoke-control: needs: - - Integration - if: needs.Integration.result == 'success' + - Smoke-stamping + if: needs.Smoke-stamping.result == 'success' runs-on: ubuntu-latest strategy: fail-fast: false @@ -312,8 +349,8 @@ jobs: Smoke-signals: needs: - - Integration - if: needs.Integration.result == 'success' + - Smoke-stamping + if: needs.Smoke-stamping.result == 'success' runs-on: ubuntu-latest strategy: fail-fast: false @@ -349,8 +386,8 @@ jobs: Smoke-tasks: needs: - - Integration - if: needs.Integration.result == 'success' + - Smoke-control + if: needs.Smoke-control.result == 'success' runs-on: ubuntu-latest strategy: fail-fast: false @@ -386,8 +423,8 @@ jobs: Smoke-thread_safe: needs: - - Integration - if: needs.Integration.result == 'success' + - Smoke-control + if: needs.Smoke-control.result == 'success' runs-on: ubuntu-latest strategy: fail-fast: false @@ -423,8 +460,8 @@ jobs: Smoke-worker: needs: - - Integration - if: needs.Integration.result == 'success' + - Smoke-control + if: needs.Smoke-control.result == 'success' runs-on: ubuntu-latest strategy: fail-fast: false From 71e8db96fbff455b346dd47a3fe617864b0d6697 Mon Sep 17 00:00:00 2001 From: Nikos Atlas Date: Mon, 2 Sep 2024 12:42:26 +0200 Subject: [PATCH 0796/1051] fix: passes 
current request context when pushing to request_stack (#9208) the _install_stack_protection worker optimisation patches the BaseTask.__call__ method to call `task.run` directly. when it does not call the `task.run` directly it instead calls the BaseTask.__call__ which pushes the new request to the stack, but only passes the `args,kwargs` of the task bypassing all the options. (https://github.com/celery/celery/blob/78c06af57ec0bc4afe84bf21289d2c0b50dcb313/celery/app/trace.py#L737) the tracer is properly generating the `request` context based on all the options passed and directly pushes to the task stack. also the tracer skips the `__call__` method (https://github.com/celery/celery/blob/78c06af57ec0bc4afe84bf21289d2c0b50dcb313/celery/app/trace.py#L324-L327) the combination of the above leads to the tracer calling the task with only the args and kwargs of the task. this commit enhances the push_request method to generate a new context based on the `task.request` which should include all the options required. Signed-off-by: Nikos Atlas --- CONTRIBUTORS.txt | 1 + celery/app/task.py | 2 +- t/unit/tasks/test_trace.py | 34 ++++++++++++++++++++++++++++++++++ 3 files changed, 36 insertions(+), 1 deletion(-) diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index f6494360eeb..b651f3ae414 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -300,3 +300,4 @@ Andy Zickler, 2024/01/18 Johannes Faigle, 2024/06/18 Giovanni Giampauli, 2024/06/26 Shamil Abdulaev, 2024/08/05 +Nikos Atlas, 2024/08/26 diff --git a/celery/app/task.py b/celery/app/task.py index 033e5661233..ed1d6ed854b 100644 --- a/celery/app/task.py +++ b/celery/app/task.py @@ -1114,7 +1114,7 @@ def add_trail(self, result): return result def push_request(self, *args, **kwargs): - self.request_stack.push(Context(*args, **kwargs)) + self.request_stack.push(Context(*args, **{**self.request.__dict__, **kwargs})) def pop_request(self): self.request_stack.pop() diff --git a/t/unit/tasks/test_trace.py b/t/unit/tasks/test_trace.py index 3494b52fdfd..cd0c8c6901e 100644 --- a/t/unit/tasks/test_trace.py +++ b/t/unit/tasks/test_trace.py @@ -629,3 +629,37 @@ def foo(self, i): assert foo(1).called_directly finally: reset_worker_optimizations(self.app) + + def test_stackprotection_headers_passed_on_new_request_stack(self): + setup_worker_optimizations(self.app) + try: + + @self.app.task(shared=False, bind=True) + def foo(self, i): + if i: + return foo.apply(args=(i-1,), headers=456) + return self.request + + task = foo.apply(args=(2,), headers=123, loglevel=5) + assert task.result.result.result.args == (0,) + assert task.result.result.result.headers == 456 + assert task.result.result.result.loglevel == 0 + finally: + reset_worker_optimizations(self.app) + + def test_stackprotection_headers_persisted_calling_task_directly(self): + setup_worker_optimizations(self.app) + try: + + @self.app.task(shared=False, bind=True) + def foo(self, i): + if i: + return foo(i-1) + return self.request + + task = foo.apply(args=(2,), headers=123, loglevel=5) + assert task.result.args == (0,) + assert task.result.headers == 123 + assert task.result.loglevel == 5 + finally: + reset_worker_optimizations(self.app) From e2428a8ffbdf564e05d92f5572bccc535acb789d Mon Sep 17 00:00:00 2001 From: Omer Katz Date: Mon, 2 Sep 2024 15:50:16 +0300 Subject: [PATCH 0797/1051] Fix broken link in the Using RabbitMQ docs page. 
(#9226) --- docs/getting-started/backends-and-brokers/rabbitmq.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/getting-started/backends-and-brokers/rabbitmq.rst b/docs/getting-started/backends-and-brokers/rabbitmq.rst index d5359843db1..a7f1bfbaba4 100644 --- a/docs/getting-started/backends-and-brokers/rabbitmq.rst +++ b/docs/getting-started/backends-and-brokers/rabbitmq.rst @@ -28,7 +28,7 @@ username, password and vhost. Installing the RabbitMQ Server ============================== -See `Installing RabbitMQ`_ over at RabbitMQ's website. For macOS +See `Downloading and Installing RabbitMQ`_ over at RabbitMQ's website. For macOS see `Installing RabbitMQ on macOS`_. .. _`Downloading and Installing RabbitMQ`: https://www.rabbitmq.com/download.html From d31fdc6181f6178e6f45b7892a02679800cf12ba Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 3 Sep 2024 15:07:20 +0300 Subject: [PATCH 0798/1051] Added Soft Shutdown Mechanism (#9213) * Added Soft Shutdown Mechanism * Added SQS broker to t/smoke/tests/test_worker.py tests * Added reference to the soft shutdown in the Redis and SQS documentation to mention how it may mitigate issues during shutdown with a long visibility timeout --- .github/workflows/python-package.yml | 18 +- celery/app/defaults.py | 1 + celery/apps/worker.py | 135 ++++++- celery/worker/consumer/consumer.py | 12 + celery/worker/request.py | 2 +- celery/worker/worker.py | 26 +- .../backends-and-brokers/redis.rst | 24 ++ .../backends-and-brokers/sqs.rst | 49 ++- docs/userguide/configuration.rst | 23 ++ docs/userguide/workers.rst | 132 +++++++ t/smoke/operations/worker_kill.py | 20 +- .../tests/failover/test_worker_failover.py | 4 - t/smoke/tests/test_consumer.py | 10 +- t/smoke/tests/test_worker.py | 371 +++++++++++++++++- t/unit/worker/test_consumer.py | 26 ++ t/unit/worker/test_worker.py | 9 + 16 files changed, 812 insertions(+), 50 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 8e3bc6cc912..41e93544f5a 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -157,7 +157,7 @@ jobs: - name: > Run tox for "${{ matrix.python-version }}-smoke" - timeout-minutes: 20 + timeout-minutes: 30 run: > tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k failover @@ -231,7 +231,7 @@ jobs: - name: > Run tox for "${{ matrix.python-version }}-smoke" - timeout-minutes: 20 + timeout-minutes: 30 run: > tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k stamping @@ -268,7 +268,7 @@ jobs: - name: > Run tox for "${{ matrix.python-version }}-smoke" - timeout-minutes: 20 + timeout-minutes: 30 run: > tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k test_canvas.py @@ -305,7 +305,7 @@ jobs: - name: > Run tox for "${{ matrix.python-version }}-smoke" - timeout-minutes: 20 + timeout-minutes: 30 run: > tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k test_consumer.py @@ -342,7 +342,7 @@ jobs: - name: > Run tox for "${{ matrix.python-version }}-smoke" - timeout-minutes: 20 + timeout-minutes: 30 run: > tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k test_control.py @@ -379,7 +379,7 @@ jobs: - name: > Run tox for "${{ matrix.python-version }}-smoke" - timeout-minutes: 20 + timeout-minutes: 30 run: > tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k test_signals.py @@ -416,7 +416,7 @@ jobs: - name: > Run tox for "${{ 
matrix.python-version }}-smoke" - timeout-minutes: 20 + timeout-minutes: 30 run: > tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k test_tasks.py @@ -453,7 +453,7 @@ jobs: - name: > Run tox for "${{ matrix.python-version }}-smoke" - timeout-minutes: 20 + timeout-minutes: 30 run: > tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k test_thread_safe.py @@ -490,7 +490,7 @@ jobs: - name: > Run tox for "${{ matrix.python-version }}-smoke" - timeout-minutes: 20 + timeout-minutes: 30 run: > tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k test_worker.py diff --git a/celery/app/defaults.py b/celery/app/defaults.py index b9aaf66ef65..5a6ea5af1d4 100644 --- a/celery/app/defaults.py +++ b/celery/app/defaults.py @@ -309,6 +309,7 @@ def __repr__(self): cancel_long_running_tasks_on_connection_loss=Option( False, type='bool' ), + soft_shutdown_timeout=Option(0.0, type='float'), concurrency=Option(None, type='int'), consumer=Option('celery.worker.consumer:Consumer', type='string'), direct=Option(False, type='bool', old={'celery_worker_direct'}), diff --git a/celery/apps/worker.py b/celery/apps/worker.py index 1556531e523..5ad3db0eaba 100644 --- a/celery/apps/worker.py +++ b/celery/apps/worker.py @@ -278,15 +278,27 @@ def set_process_status(self, info): ) -def _shutdown_handler(worker, sig='TERM', how='Warm', - callback=None, exitcode=EX_OK): +def _shutdown_handler(worker: Worker, sig='TERM', how='Warm', callback=None, exitcode=EX_OK, verbose=True): + """Install signal handler for warm/cold shutdown. + + The handler will run from the MainProcess. + + Args: + worker (Worker): The worker that received the signal. + sig (str, optional): The signal that was received. Defaults to 'TERM'. + how (str, optional): The type of shutdown to perform. Defaults to 'Warm'. + callback (Callable, optional): Signal handler. Defaults to None. + exitcode (int, optional): The exit code to use. Defaults to EX_OK. + verbose (bool, optional): Whether to print the type of shutdown. Defaults to True. + """ def _handle_request(*args): with in_sighandler(): from celery.worker import state if current_process()._name == 'MainProcess': if callback: callback(worker) - safe_say(f'worker: {how} shutdown (MainProcess)', sys.__stdout__) + if verbose: + safe_say(f'worker: {how} shutdown (MainProcess)', sys.__stdout__) signals.worker_shutting_down.send( sender=worker.hostname, sig=sig, how=how, exitcode=exitcode, @@ -297,19 +309,126 @@ def _handle_request(*args): platforms.signals[sig] = _handle_request +def on_hard_shutdown(worker: Worker): + """Signal handler for hard shutdown. + + The handler will terminate the worker immediately by force using the exit code ``EX_FAILURE``. + + In practice, you should never get here, as the standard shutdown process should be enough. + This handler is only for the worst-case scenario, where the worker is stuck and cannot be + terminated gracefully (e.g., spamming the Ctrl+C in the terminal to force the worker to terminate). + + Args: + worker (Worker): The worker that received the signal. + + Raises: + WorkerTerminate: This exception will be raised in the MainProcess to terminate the worker immediately. + """ + from celery.exceptions import WorkerTerminate + raise WorkerTerminate(EX_FAILURE) + + +def during_soft_shutdown(worker: Worker): + """This signal handler is called when the worker is in the middle of the soft shutdown process. + + When the worker is in the soft shutdown process, it is waiting for tasks to finish. 
If the worker
+    receives a SIGINT (Ctrl+C) or SIGQUIT signal (or possibly SIGTERM if REMAP_SIGTERM is set to "SIGQUIT"),
+    the handler will cancel all unacked requests to allow the worker to terminate gracefully, and replace the
+    signal handler for SIGINT and SIGQUIT with the hard shutdown handler ``on_hard_shutdown`` to terminate
+    the worker immediately by force the next time the signal is received.
+
+    It will give the worker one last chance to gracefully terminate (the cold shutdown), after canceling all
+    unacked requests, before using the hard shutdown handler to terminate the worker forcefully.
+
+    Args:
+        worker (Worker): The worker that received the signal.
+    """
+    # Replace the signal handler for SIGINT (Ctrl+C) and SIGQUIT (and possibly SIGTERM)
+    # with the hard shutdown handler to terminate the worker immediately by force
+    install_worker_term_hard_handler(worker, sig='SIGINT', callback=on_hard_shutdown, verbose=False)
+    install_worker_term_hard_handler(worker, sig='SIGQUIT', callback=on_hard_shutdown)
+
+    # Cancel all unacked requests and allow the worker to terminate naturally
+    worker.consumer.cancel_all_unacked_requests()
+
+    # We get here if the worker was in the middle of the soft (cold) shutdown process,
+    # and the matching signal was received. This can typically happen when the worker is
+    # waiting for tasks to finish, and the user decides to still cancel the running tasks.
+    # We give the worker the last chance to gracefully terminate by letting the soft shutdown
+    # waiting time finish, which is running in the MainProcess from the previous signal handler call.
+    safe_say('Waiting gracefully for cold shutdown to complete...', sys.__stdout__)
+
+
+def on_cold_shutdown(worker: Worker):
+    """Signal handler for cold shutdown.
+
+    Registered for SIGQUIT and SIGINT (Ctrl+C) signals. If REMAP_SIGTERM is set to "SIGQUIT", this handler will also
+    be registered for SIGTERM.
+
+    This handler will initiate the cold (and soft if enabled) shutdown procedure for the worker.
+
+    Worker running with N tasks:
+        - SIGTERM:
+            - The worker will initiate the warm shutdown process until all tasks are finished. Additional
+              SIGTERM signals will be ignored. SIGQUIT will transition to the cold shutdown process described below.
+        - SIGQUIT:
+            - The worker will initiate the cold shutdown process.
+            - If the soft shutdown is enabled, the worker will wait for the tasks to finish up to the soft
+              shutdown timeout (practically having a limited warm shutdown just before the cold shutdown).
+            - Cancel all tasks (from the MainProcess) and allow the worker to complete the cold shutdown
+              process gracefully.
+
+    Caveats:
+        - SIGINT (Ctrl+C) signal is defined to replace itself with the cold shutdown (SIGQUIT) after first use,
+          and to emit a message to the user to hit Ctrl+C again to initiate the cold shutdown process. But, most
+          important, it will also be caught in WorkController.start() to initiate the warm shutdown process.
+        - SIGTERM will also be handled in WorkController.start() to initiate the warm shutdown process (the same).
+        - If REMAP_SIGTERM is set to "SIGQUIT", the SIGTERM signal will be remapped to SIGQUIT, and the cold
+          shutdown process will be initiated instead of the warm shutdown process using SIGTERM.
+        - If SIGQUIT is received (also via SIGINT) during the cold/soft shutdown process, the handler will cancel all
+          unacked requests but still wait for the soft shutdown process to finish before terminating the worker
+          gracefully.
+          The next time the signal is received though, the worker will terminate immediately by force.
+
+    So, the purpose of this handler is to allow waiting for the soft shutdown timeout, then cancel all tasks from
+    the MainProcess and let WorkController.terminate() terminate the worker naturally. If the soft shutdown
+    is disabled, it will immediately cancel all tasks and let the cold shutdown finish normally.
+
+    Args:
+        worker (Worker): The worker that received the signal.
+    """
+    safe_say('worker: Hitting Ctrl+C again will terminate all running tasks!', sys.__stdout__)
+
+    # Replace the signal handler for SIGINT (Ctrl+C) and SIGQUIT (and possibly SIGTERM)
+    install_worker_term_hard_handler(worker, sig='SIGINT', callback=during_soft_shutdown)
+    install_worker_term_hard_handler(worker, sig='SIGQUIT', callback=during_soft_shutdown)
+    if REMAP_SIGTERM == "SIGQUIT":
+        install_worker_term_hard_handler(worker, sig='SIGTERM', callback=during_soft_shutdown)
+    # else, SIGTERM will print the _shutdown_handler's message and do nothing, every time it is received.
+
+    # Initiate soft shutdown process (if enabled and tasks are running)
+    worker.wait_for_soft_shutdown()
+
+    # Cancel all unacked requests and allow the worker to terminate naturally
+    worker.consumer.cancel_all_unacked_requests()
+
+    # Stop the pool to allow successful tasks to call on_success()
+    worker.consumer.pool.stop()
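+
+# An illustrative aside, not part of the mechanism above (the project name `proj` is
+# assumed): exporting REMAP_SIGTERM in the worker's environment makes SIGTERM take the
+# cold-shutdown path wired up below, instead of the default warm shutdown:
+#
+#     $ REMAP_SIGTERM=SIGQUIT celery -A proj worker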
+
+
+# Allow SIGTERM to be remapped to SIGQUIT to initiate cold shutdown instead of warm shutdown using SIGTERM
 if REMAP_SIGTERM == "SIGQUIT":
     install_worker_term_handler = partial(
-        _shutdown_handler, sig='SIGTERM', how='Cold', exitcode=EX_FAILURE,
+        _shutdown_handler, sig='SIGTERM', how='Cold', callback=on_cold_shutdown, exitcode=EX_FAILURE,
     )
 else:
     install_worker_term_handler = partial(
         _shutdown_handler, sig='SIGTERM', how='Warm',
     )
+
 if not is_jython:  # pragma: no cover
     install_worker_term_hard_handler = partial(
-        _shutdown_handler, sig='SIGQUIT', how='Cold',
-        exitcode=EX_FAILURE,
+        _shutdown_handler, sig='SIGQUIT', how='Cold', callback=on_cold_shutdown, exitcode=EX_FAILURE,
     )
 else:  # pragma: no cover
     install_worker_term_handler = \
@@ -317,9 +436,9 @@ def _handle_request(*args):
 def on_SIGINT(worker):
-    safe_say('worker: Hitting Ctrl+C again will terminate all running tasks!',
+    safe_say('worker: Hitting Ctrl+C again will initiate cold shutdown, terminating all running tasks!',
              sys.__stdout__)
-    install_worker_term_hard_handler(worker, sig='SIGINT')
+    install_worker_term_hard_handler(worker, sig='SIGINT', verbose=False)
 
 
 if not is_jython:  # pragma: no cover
diff --git a/celery/worker/consumer/consumer.py b/celery/worker/consumer/consumer.py
index cae0b5446ea..551dfd586a7 100644
--- a/celery/worker/consumer/consumer.py
+++ b/celery/worker/consumer/consumer.py
@@ -730,6 +730,18 @@ def __repr__(self):
             self=self, state=self.blueprint.human_state(),
         )
 
+    def cancel_all_unacked_requests(self):
+        """Cancel all unacked requests with late acknowledgement enabled."""
+
+        def should_cancel(request):
+            return request.task.acks_late and not request.acknowledged
+
+        requests_to_cancel = tuple(filter(should_cancel, active_requests))
+
+        if requests_to_cancel:
+            for request in requests_to_cancel:
+                request.cancel(self.pool)
+
 
 class Evloop(bootsteps.StartStopStep):
     """Event loop service.
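A short sketch of the eligibility rule in ``cancel_all_unacked_requests`` above (the app and
task names are assumed for illustration): only requests whose task runs with late
acknowledgement, and whose message has not been acked yet, are cancelled.

.. code-block:: python

    @app.task(acks_late=True)   # still unacked while running -> eligible for cancellation
    def long_io():
        ...

    @app.task(acks_late=False)  # acked just before execution -> left untouched
    def quick():
        ...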
diff --git a/celery/worker/request.py b/celery/worker/request.py
index 5d7c93a467c..1e337b84fc5 100644
--- a/celery/worker/request.py
+++ b/celery/worker/request.py
@@ -777,7 +777,7 @@ def on_success(self, failed__retval__runtime, **kwargs):
             if isinstance(exc, (SystemExit, KeyboardInterrupt)):
                 raise exc
             return self.on_failure(retval, return_ok=True)
-        task_ready(self)
+        task_ready(self, successful=True)
 
         if acks_late:
             self.acknowledge()
diff --git a/celery/worker/worker.py b/celery/worker/worker.py
index 28609d9d8c5..b08a1d6d1e1 100644
--- a/celery/worker/worker.py
+++ b/celery/worker/worker.py
@@ -15,6 +15,7 @@
 import os
 import sys
 from datetime import datetime, timezone
+from time import sleep
 
 from billiard import cpu_count
 from kombu.utils.compat import detect_environment
@@ -241,7 +242,7 @@ def should_use_eventloop(self):
                 not self.app.IS_WINDOWS)
 
     def stop(self, in_sighandler=False, exitcode=None):
-        """Graceful shutdown of the worker server."""
+        """Graceful shutdown of the worker server (Warm shutdown)."""
         if exitcode is not None:
             self.exitcode = exitcode
         if self.blueprint.state == RUN:
@@ -251,7 +252,7 @@ def stop(self, in_sighandler=False, exitcode=None):
             self._send_worker_shutdown()
 
     def terminate(self, in_sighandler=False):
-        """Not so graceful shutdown of the worker server."""
+        """Not so graceful shutdown of the worker server (Cold shutdown)."""
         if self.blueprint.state != TERMINATE:
             self.signal_consumer_close()
         if not in_sighandler or self.pool.signal_safe:
@@ -407,3 +408,24 @@ def setup_defaults(self, concurrency=None, loglevel='WARN', logfile=None,
             'worker_disable_rate_limits', disable_rate_limits,
         )
         self.worker_lost_wait = either('worker_lost_wait', worker_lost_wait)
+
+    def wait_for_soft_shutdown(self):
+        """Wait :setting:`worker_soft_shutdown_timeout` if soft shutdown is enabled.
+
+        To enable soft shutdown, set the :setting:`worker_soft_shutdown_timeout` in the
+        configuration. Soft shutdown can be used to allow the worker to finish processing
+        a few more tasks before initiating a cold shutdown. This mechanism allows the worker
+        to finish short tasks that are already in progress and requeue long-running tasks
+        to be picked up by another worker.
+
+        .. warning::
+            If there are no tasks in the worker, the worker will not wait for the
+            soft shutdown timeout even if it is set, as it makes no sense to wait for
+            the timeout when there are no tasks to process.
+        """
+        requests = tuple(state.active_requests)
+        app = self.app
+        if app.conf.worker_soft_shutdown_timeout > 0 and requests:
+            log = f"Initiating Soft Shutdown, terminating in {app.conf.worker_soft_shutdown_timeout} seconds"
+            logger.warning(log)
+            sleep(app.conf.worker_soft_shutdown_timeout)
diff --git a/docs/getting-started/backends-and-brokers/redis.rst b/docs/getting-started/backends-and-brokers/redis.rst
index 088da6bafd2..7b658f5d906 100644
--- a/docs/getting-started/backends-and-brokers/redis.rst
+++ b/docs/getting-started/backends-and-brokers/redis.rst
@@ -199,6 +199,30 @@ with the same name:
 
 The value must be an int describing the number of seconds.
 
+Soft Shutdown
+-------------
+
+During :ref:`shutdown <worker_shutdown>`, the worker will attempt to re-queue any unacknowledged messages
+with :setting:`task_acks_late` enabled. However, if the worker is terminated forcefully
+(:ref:`cold shutdown <worker-cold-shutdown>`), the worker might not be able to re-queue the tasks on time,
+and they will not be consumed again until the :ref:`redis-visibility_timeout` has passed. This creates a
+problem when the :ref:`redis-visibility_timeout` is very high and a worker needs to shut down just after it has
+received a task. If the task is not re-queued in such a case, it will need to wait for the long visibility timeout
+to pass before it can be consumed again, leading to potentially very long delays in task execution.
+
+The :ref:`soft shutdown <worker-soft-shutdown>` introduces a time-limited warm shutdown phase just before
+the :ref:`cold shutdown <worker-cold-shutdown>`. This time window significantly increases the chances of
+re-queuing the tasks during shutdown, which mitigates the problem of long visibility timeouts.
+
+To enable the :ref:`soft shutdown <worker-soft-shutdown>`, set the :setting:`worker_soft_shutdown_timeout` to a value
+greater than 0. The value must be a float describing the number of seconds. During this time, the worker will
+continue to process the running tasks until the timeout expires, after which the
+:ref:`cold shutdown <worker-cold-shutdown>` will be initiated automatically to terminate the worker gracefully.
+
+If :ref:`REMAP_SIGTERM <worker-REMAP_SIGTERM>` is configured to SIGQUIT in the environment variables, and
+the :setting:`worker_soft_shutdown_timeout` is set, the worker will initiate the
+:ref:`soft shutdown <worker-soft-shutdown>` when it receives the :sig:`TERM` signal (*and* the :sig:`QUIT` signal).
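+
+A minimal configuration sketch (the broker value and timeout below are illustrative only,
+not recommendations):
+
+.. code-block:: python
+
+    app.conf.broker_transport_options = {'visibility_timeout': 3600}
+    # Give running tasks up to 10 seconds to finish before the cold shutdown proceeds.
+    app.conf.worker_soft_shutdown_timeout = 10.0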
+
 Key eviction
 ------------
 
diff --git a/docs/getting-started/backends-and-brokers/sqs.rst b/docs/getting-started/backends-and-brokers/sqs.rst
index 9017871b984..1e67bc2b58b 100644
--- a/docs/getting-started/backends-and-brokers/sqs.rst
+++ b/docs/getting-started/backends-and-brokers/sqs.rst
@@ -245,25 +245,25 @@ Caveats
 - If a task isn't acknowledged within the ``visibility_timeout``,
   the task will be redelivered to another worker and executed.
 
-    This causes problems with ETA/countdown/retry tasks where the
-    time to execute exceeds the visibility timeout; in fact if that
-    happens it will be executed again, and again in a loop.
+  This causes problems with ETA/countdown/retry tasks where the
+  time to execute exceeds the visibility timeout; in fact if that
+  happens it will be executed again, and again in a loop.
 
-    So you have to increase the visibility timeout to match
-    the time of the longest ETA you're planning to use.
+  So you have to increase the visibility timeout to match
+  the time of the longest ETA you're planning to use.
 
-    Note that Celery will redeliver messages at worker shutdown,
-    so having a long visibility timeout will only delay the redelivery
-    of 'lost' tasks in the event of a power failure or forcefully terminated
-    workers.
+  Note that Celery will redeliver messages at worker shutdown,
+  so having a long visibility timeout will only delay the redelivery
+  of 'lost' tasks in the event of a power failure or forcefully terminated
+  workers.
 
-    Periodic tasks won't be affected by the visibility timeout,
-    as it is a concept separate from ETA/countdown.
+  Periodic tasks won't be affected by the visibility timeout,
+  as it is a concept separate from ETA/countdown.
 
-    The maximum visibility timeout supported by AWS as of this writing
-    is 12 hours (43200 seconds)::
+  The maximum visibility timeout supported by AWS as of this writing
+  is 12 hours (43200 seconds)::
 
-        broker_transport_options = {'visibility_timeout': 43200}
+    broker_transport_options = {'visibility_timeout': 43200}
 
 - SQS doesn't yet support worker remote control commands.
 
@@ -283,6 +283,27 @@ Caveats
         }
     task.apply_async(**message_properties)
 
+- During :ref:`shutdown <worker_shutdown>`, the worker will attempt to re-queue any unacknowledged messages
+  with :setting:`task_acks_late` enabled. However, if the worker is terminated forcefully
+  (:ref:`cold shutdown <worker-cold-shutdown>`), the worker might not be able to re-queue the tasks on time,
+  and they will not be consumed again until the :ref:`sqs-visibility-timeout` has passed. This creates a
+  problem when the :ref:`sqs-visibility-timeout` is very high and a worker needs to shut down just after it has
+  received a task. If the task is not re-queued in such a case, it will need to wait for the long visibility
+  timeout to pass before it can be consumed again, leading to potentially very long delays in task execution.
+
+  The :ref:`soft shutdown <worker-soft-shutdown>` introduces a time-limited warm shutdown phase just before
+  the :ref:`cold shutdown <worker-cold-shutdown>`. This time window significantly increases the chances of
+  re-queuing the tasks during shutdown, which mitigates the problem of long visibility timeouts.
+
+  To enable the :ref:`soft shutdown <worker-soft-shutdown>`, set the :setting:`worker_soft_shutdown_timeout` to a
+  value greater than 0. The value must be a float describing the number of seconds. During this time, the worker
+  will continue to process the running tasks until the timeout expires, after which the
+  :ref:`cold shutdown <worker-cold-shutdown>` will be initiated automatically to terminate the worker gracefully.
+
+  If :ref:`REMAP_SIGTERM <worker-REMAP_SIGTERM>` is configured to SIGQUIT in the environment variables, and
+  the :setting:`worker_soft_shutdown_timeout` is set, the worker will initiate the
+  :ref:`soft shutdown <worker-soft-shutdown>` when it receives the :sig:`TERM` signal (*and* the :sig:`QUIT` signal).
+
 .. _sqs-results-configuration:
 
diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst
index 5d7ed9c8b07..c36d80246a3 100644
--- a/docs/userguide/configuration.rst
+++ b/docs/userguide/configuration.rst
@@ -3275,6 +3275,29 @@ Default: Enabled.
 
 Automatically detect if any of the queues in :setting:`task_queues` are quorum queues
 (including the :setting:`task_default_queue`) and disable the global QoS if any quorum queue is detected.
 
+.. setting:: worker_soft_shutdown_timeout
+
+``worker_soft_shutdown_timeout``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. versionadded:: 5.5
+
+Default: 0.0.
+
+The standard :ref:`warm shutdown <worker-warm-shutdown>` will wait for all tasks to finish before shutting down
+unless the cold shutdown is triggered. The :ref:`soft shutdown <worker-soft-shutdown>` will add a waiting time
+before the cold shutdown is initiated. This setting specifies how long the worker will wait before the cold shutdown
+is initiated and the worker is terminated.
+
+This also applies when the worker initiates a :ref:`cold shutdown <worker-cold-shutdown>` without doing a warm
+shutdown first.
+
+If the value is set to 0.0, the soft shutdown is practically disabled. Regardless of the value, the soft shutdown
+will be disabled if there are no tasks running.
+
+Experiment with this value to find the optimal time for your tasks to finish gracefully before the worker is
+terminated. Recommended values can be 10, 30, or 60 seconds. Too high a value can lead to a long waiting time
+before the worker is terminated, and may trigger a :sig:`KILL` signal from the host system to forcefully
+terminate the worker.
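+
+For example, a sketch of this setting in a configuration module (the value is illustrative;
+pick one that matches how long your tasks usually need to wrap up):
+
+.. code-block:: python
+
+    # Keep processing running tasks for up to 30 seconds before the cold shutdown proceeds.
+    worker_soft_shutdown_timeout = 30.0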
+
 .. _conf-events:
 
 Events
diff --git a/docs/userguide/workers.rst b/docs/userguide/workers.rst
index 1304a6ad605..29ccb04fe4c 100644
--- a/docs/userguide/workers.rst
+++ b/docs/userguide/workers.rst
@@ -101,6 +101,138 @@ longer version:
 On Linux systems, Celery now supports sending :sig:`KILL` signal to all child processes
 after worker termination. This is done via `PR_SET_PDEATHSIG` option of ``prctl(2)``.
 
+.. _worker_shutdown:
+
+Worker Shutdown
+---------------
+
+We will use the terms *Warm, Soft, Cold, Hard* to describe the different stages of worker shutdown.
+The worker will initiate the shutdown process when it receives the :sig:`TERM` or :sig:`QUIT` signal.
+The :sig:`INT` (Ctrl-C) signal is also handled during the shutdown process and always triggers the
+next stage of the shutdown process.
+
+.. _worker-warm-shutdown:
+
+Warm Shutdown
+~~~~~~~~~~~~~
+
+When the worker receives the :sig:`TERM` signal, it will initiate a warm shutdown. The worker will
+finish all currently executing tasks before it actually terminates. The first time the worker receives
+the :sig:`INT` (Ctrl-C) signal, it will initiate a warm shutdown as well.
+
+The warm shutdown will stop the call to :func:`WorkController.start() <celery.worker.worker.WorkController.start>`
+and will call :func:`WorkController.stop() <celery.worker.worker.WorkController.stop>`.
+
+- Additional :sig:`TERM` signals will be ignored during the warm shutdown process.
+- The next :sig:`INT` signal will trigger the next stage of the shutdown process.
+
+.. _worker-cold-shutdown:
+
+Cold Shutdown
+~~~~~~~~~~~~~
+
+Cold shutdown is initiated when the worker receives the :sig:`QUIT` signal. The worker will stop
+all currently executing tasks and terminate immediately.
+
+.. _worker-REMAP_SIGTERM:
+
+.. note::
+
+    If the environment variable ``REMAP_SIGTERM`` is set to ``SIGQUIT``, the worker will also initiate
+    a cold shutdown when it receives the :sig:`TERM` signal instead of a warm shutdown.
+
+The cold shutdown will stop the call to :func:`WorkController.start() <celery.worker.worker.WorkController.start>`
+and will call :func:`WorkController.terminate() <celery.worker.worker.WorkController.terminate>`.
+
+If the warm shutdown has already started, the transition to cold shutdown will run a signal handler
+``on_cold_shutdown`` to cancel all currently executing tasks from the MainProcess and potentially
+trigger the :ref:`worker-soft-shutdown`.
+
+.. _worker-soft-shutdown:
+
+Soft Shutdown
+~~~~~~~~~~~~~
+
+.. versionadded:: 5.5
+
+Soft shutdown is a time-limited warm shutdown, initiated just before the cold shutdown. The worker will
+allow :setting:`worker_soft_shutdown_timeout` seconds for all currently executing tasks to finish before
+it terminates. If the time limit is reached, the worker will initiate a cold shutdown and cancel all currently
+executing tasks. If the :sig:`QUIT` signal is received during the soft shutdown, the worker will cancel all
+currently executing tasks but still wait for the time limit to finish before terminating, giving a chance for
+the worker to perform the cold shutdown a little more gracefully.
+
+The soft shutdown is disabled by default to maintain backward compatibility with the :ref:`worker-cold-shutdown`
+behavior. To enable the soft shutdown, set :setting:`worker_soft_shutdown_timeout` to a positive float value.
+
+For example, when setting ``worker_soft_shutdown_timeout=3``, the worker will allow 3 seconds for all currently
+executing tasks to finish before it terminates. If the time limit is reached, the worker will initiate a cold shutdown
+and cancel all currently executing tasks.
+
+.. code-block:: console
+
+    [INFO/MainProcess] Task myapp.long_running_task[6f748357-b2c7-456a-95de-f05c00504042] received
+    [WARNING/ForkPoolWorker-8] long_running_task is running, sleeping 1/2000s
+    [WARNING/ForkPoolWorker-8] long_running_task is running, sleeping 2/2000s
+    [WARNING/ForkPoolWorker-8] long_running_task is running, sleeping 3/2000s
+    ^C
+    worker: Hitting Ctrl+C again will initiate cold shutdown, terminating all running tasks!
+ + worker: Warm shutdown (MainProcess) + [WARNING/ForkPoolWorker-8] long_running_task is running, sleeping 4/2000s + [WARNING/ForkPoolWorker-8] long_running_task is running, sleeping 5/2000s + [WARNING/ForkPoolWorker-8] long_running_task is running, sleeping 6/2000s + ^C + worker: Hitting Ctrl+C again will terminate all running tasks! + [WARNING/MainProcess] Initiating Soft Shutdown, terminating in 3 seconds + [WARNING/ForkPoolWorker-8] long_running_task is running, sleeping 7/2000s + [WARNING/ForkPoolWorker-8] long_running_task is running, sleeping 8/2000s + [WARNING/ForkPoolWorker-8] long_running_task is running, sleeping 9/2000s + [WARNING/MainProcess] Restoring 1 unacknowledged message(s) + +- The next :sig:`QUIT` signal will cancel the tasks that are still running in the soft shutdown, but the worker + will still wait for the time limit to finish before terminating. +- The next (2nd) :sig:`QUIT` or :sig:`INT` signal will trigger the next stage of the shutdown process. + +.. _worker-hard-shutdown: + +Hard Shutdown +~~~~~~~~~~~~~ + +.. versionadded:: 5.5 + +Hard shutdown is mostly for local or debug purposes, allowing to spam the :sig:`INT` (Ctrl-C) signal +to force the worker to terminate immediately. The worker will stop all currently executing tasks and +terminate immediately by raising a :exc:`@WorkerTerminate` exception in the MainProcess. + +For example, notice the ``^C`` in the logs below (using the :sig:`INT` signal to move from stage to stage): + +.. code-block:: console + + [INFO/MainProcess] Task myapp.long_running_task[7235ac16-543d-4fd5-a9e1-2d2bb8ab630a] received + [WARNING/ForkPoolWorker-8] long_running_task is running, sleeping 1/2000s + [WARNING/ForkPoolWorker-8] long_running_task is running, sleeping 2/2000s + ^C + worker: Hitting Ctrl+C again will initiate cold shutdown, terminating all running tasks! + + worker: Warm shutdown (MainProcess) + [WARNING/ForkPoolWorker-8] long_running_task is running, sleeping 3/2000s + [WARNING/ForkPoolWorker-8] long_running_task is running, sleeping 4/2000s + ^C + worker: Hitting Ctrl+C again will terminate all running tasks! + [WARNING/MainProcess] Initiating Soft Shutdown, terminating in 10 seconds + [WARNING/ForkPoolWorker-8] long_running_task is running, sleeping 5/2000s + [WARNING/ForkPoolWorker-8] long_running_task is running, sleeping 6/2000s + ^C + Waiting gracefully for cold shutdown to complete... + + worker: Cold shutdown (MainProcess) + ^C[WARNING/MainProcess] Restoring 1 unacknowledged message(s) + +.. warning:: + + The log ``Restoring 1 unacknowledged message(s)`` is misleading as it is not guaranteed that the message + will be restored after a hard shutdown. The :ref:`worker-soft-shutdown` allows adding a time window just between + the warm and the cold shutdown that improves the gracefulness of the shutdown process. .. _worker-restarting: diff --git a/t/smoke/operations/worker_kill.py b/t/smoke/operations/worker_kill.py index 7c4b2583e3f..767cdf45bcc 100644 --- a/t/smoke/operations/worker_kill.py +++ b/t/smoke/operations/worker_kill.py @@ -9,33 +9,39 @@ class WorkerKill: """Kills a worker in different ways.""" + class Method(Enum): DOCKER_KILL = auto() CONTROL_SHUTDOWN = auto() + SIGTERM = auto() + SIGQUIT = auto() def kill_worker( self, worker: CeleryTestWorker, method: WorkerKill.Method, - assertion: bool = True, ) -> None: """Kill a Celery worker. Args: worker (CeleryTestWorker): Worker to kill. method (WorkerKill.Method): The method to kill the worker. 
- assertion (bool, optional): Whether to assert the worker state after kill. Defaults to True. """ if method == WorkerKill.Method.DOCKER_KILL: worker.kill() + assert worker.container.status == "exited", ( + f"Worker container should be in 'exited' state after kill, " + f"but is in '{worker.container.status}' state instead." + ) + if method == WorkerKill.Method.CONTROL_SHUTDOWN: control: Control = worker.app.control control.shutdown(destination=[worker.hostname()]) worker.container.reload() - if assertion: - assert worker.container.status == "exited", ( - f"Worker container should be in 'exited' state after kill, " - f"but is in '{worker.container.status}' state instead." - ) + if method == WorkerKill.Method.SIGTERM: + worker.kill(signal="SIGTERM") + + if method == WorkerKill.Method.SIGQUIT: + worker.kill(signal="SIGQUIT") diff --git a/t/smoke/tests/failover/test_worker_failover.py b/t/smoke/tests/failover/test_worker_failover.py index 301d7be1047..33e2e3d87c9 100644 --- a/t/smoke/tests/failover/test_worker_failover.py +++ b/t/smoke/tests/failover/test_worker_failover.py @@ -7,8 +7,6 @@ from t.smoke.conftest import SuiteOperations, WorkerKill from t.smoke.tasks import long_running_task -MB = 1024 * 1024 - @pytest.fixture def celery_worker_cluster( @@ -26,8 +24,6 @@ class test_worker_failover(SuiteOperations): def default_worker_app(self, default_worker_app: Celery) -> Celery: app = default_worker_app app.conf.task_acks_late = True - if app.conf.broker_url.startswith("redis"): - app.conf.broker_transport_options = {"visibility_timeout": 1} return app def test_killing_first_worker( diff --git a/t/smoke/tests/test_consumer.py b/t/smoke/tests/test_consumer.py index 042451f2980..28f67207ab8 100644 --- a/t/smoke/tests/test_consumer.py +++ b/t/smoke/tests/test_consumer.py @@ -16,9 +16,15 @@ def default_worker_app(default_worker_app: Celery) -> Celery: app.conf.worker_prefetch_multiplier = WORKER_PREFETCH_MULTIPLIER app.conf.worker_concurrency = WORKER_CONCURRENCY if app.conf.broker_url.startswith("redis"): - app.conf.broker_transport_options = {"visibility_timeout": 1} + app.conf.broker_transport_options = { + "visibility_timeout": 1, + "polling_interval": 1, + } if app.conf.result_backend.startswith("redis"): - app.conf.result_backend_transport_options = {"visibility_timeout": 1} + app.conf.result_backend_transport_options = { + "visibility_timeout": 1, + "polling_interval": 1, + } return app diff --git a/t/smoke/tests/test_worker.py b/t/smoke/tests/test_worker.py index 15fbbf3cda8..e478a982e3f 100644 --- a/t/smoke/tests/test_worker.py +++ b/t/smoke/tests/test_worker.py @@ -1,11 +1,38 @@ +from time import sleep + import pytest -from pytest_celery import RESULT_TIMEOUT, CeleryTestSetup +from pytest_celery import (ALL_CELERY_BROKERS, CELERY_LOCALSTACK_BROKER, RESULT_TIMEOUT, CeleryTestBroker, + CeleryTestSetup, CeleryTestWorker, RabbitMQTestBroker, _is_vendor_installed) +import celery from celery import Celery -from celery.canvas import chain -from t.smoke.conftest import SuiteOperations, WorkerRestart +from celery.canvas import chain, group +from t.smoke.conftest import SuiteOperations, WorkerKill, WorkerRestart from t.smoke.tasks import long_running_task +if _is_vendor_installed("localstack"): + ALL_CELERY_BROKERS.add(CELERY_LOCALSTACK_BROKER) + + +@pytest.fixture(params=ALL_CELERY_BROKERS) +def celery_broker(request: pytest.FixtureRequest) -> CeleryTestBroker: # type: ignore + broker: CeleryTestBroker = request.getfixturevalue(request.param) + yield broker + broker.teardown() + + +def 
assert_container_exited(worker: CeleryTestWorker, attempts: int = RESULT_TIMEOUT): + """It might take a few moments for the container to exit after the worker is killed.""" + while attempts: + worker.container.reload() + if worker.container.status == "exited": + break + attempts -= 1 + sleep(1) + + worker.container.reload() + assert worker.container.status == "exited" + @pytest.mark.parametrize("method", list(WorkerRestart.Method)) class test_worker_restart(SuiteOperations): @@ -43,3 +70,341 @@ def test_restart_between_task_execution( assert first_res.get(RESULT_TIMEOUT) is True self.restart_worker(celery_setup.worker, method) assert second_res.get(RESULT_TIMEOUT) is True + + +class test_worker_shutdown(SuiteOperations): + @pytest.fixture + def default_worker_app(self, default_worker_app: Celery) -> Celery: + app = default_worker_app + app.conf.task_acks_late = True + return app + + def test_warm_shutdown(self, celery_setup: CeleryTestSetup): + queue = celery_setup.worker.worker_queue + worker = celery_setup.worker + sig = long_running_task.si(5, verbose=True).set(queue=queue) + res = sig.delay() + + worker.wait_for_log("Starting long running task") + self.kill_worker(worker, WorkerKill.Method.SIGTERM) + worker.wait_for_log("worker: Warm shutdown (MainProcess)") + worker.wait_for_log(f"long_running_task[{res.id}] succeeded") + + assert_container_exited(worker) + assert res.get(RESULT_TIMEOUT) + + def test_multiple_warm_shutdown_does_nothing(self, celery_setup: CeleryTestSetup): + queue = celery_setup.worker.worker_queue + worker = celery_setup.worker + sig = long_running_task.si(5, verbose=True).set(queue=queue) + res = sig.delay() + + worker.wait_for_log("Starting long running task") + for _ in range(3): + self.kill_worker(worker, WorkerKill.Method.SIGTERM) + worker.wait_for_log(f"long_running_task[{res.id}] succeeded") + + assert_container_exited(worker) + assert res.get(RESULT_TIMEOUT) + + def test_cold_shutdown(self, celery_setup: CeleryTestSetup): + queue = celery_setup.worker.worker_queue + worker = celery_setup.worker + sig = long_running_task.si(5, verbose=True).set(queue=queue) + res = sig.delay() + + worker.wait_for_log("Starting long running task") + self.kill_worker(worker, WorkerKill.Method.SIGQUIT) + worker.wait_for_log("worker: Cold shutdown (MainProcess)") + worker.assert_log_does_not_exist(f"long_running_task[{res.id}] succeeded") + + assert_container_exited(worker) + + with pytest.raises(celery.exceptions.TimeoutError): + res.get(timeout=5) + + def test_hard_shutdown_from_warm(self, celery_setup: CeleryTestSetup): + queue = celery_setup.worker.worker_queue + worker = celery_setup.worker + sig = long_running_task.si(420, verbose=True).set(queue=queue) + sig.delay() + + worker.wait_for_log("Starting long running task") + self.kill_worker(worker, WorkerKill.Method.SIGTERM) + self.kill_worker(worker, WorkerKill.Method.SIGQUIT) + self.kill_worker(worker, WorkerKill.Method.SIGQUIT) + + worker.wait_for_log("worker: Warm shutdown (MainProcess)") + worker.wait_for_log("worker: Cold shutdown (MainProcess)") + + assert_container_exited(worker) + + def test_hard_shutdown_from_cold(self, celery_setup: CeleryTestSetup): + queue = celery_setup.worker.worker_queue + worker = celery_setup.worker + sig = long_running_task.si(420, verbose=True).set(queue=queue) + sig.delay() + + worker.wait_for_log("Starting long running task") + self.kill_worker(worker, WorkerKill.Method.SIGQUIT) + self.kill_worker(worker, WorkerKill.Method.SIGQUIT) + + worker.wait_for_log("worker: Cold shutdown 
(MainProcess)") + + assert_container_exited(worker) + + class test_REMAP_SIGTERM(SuiteOperations): + @pytest.fixture + def default_worker_env(self, default_worker_env: dict) -> dict: + default_worker_env.update({"REMAP_SIGTERM": "SIGQUIT"}) + return default_worker_env + + def test_cold_shutdown(self, celery_setup: CeleryTestSetup): + queue = celery_setup.worker.worker_queue + worker = celery_setup.worker + sig = long_running_task.si(5, verbose=True).set(queue=queue) + res = sig.delay() + + worker.wait_for_log("Starting long running task") + self.kill_worker(worker, WorkerKill.Method.SIGTERM) + worker.wait_for_log("worker: Cold shutdown (MainProcess)") + worker.assert_log_does_not_exist(f"long_running_task[{res.id}] succeeded") + + assert_container_exited(worker) + + def test_hard_shutdown_from_cold(self, celery_setup: CeleryTestSetup): + queue = celery_setup.worker.worker_queue + worker = celery_setup.worker + sig = long_running_task.si(420, verbose=True).set(queue=queue) + sig.delay() + + worker.wait_for_log("Starting long running task") + self.kill_worker(worker, WorkerKill.Method.SIGTERM) + self.kill_worker(worker, WorkerKill.Method.SIGTERM) + + worker.wait_for_log("worker: Cold shutdown (MainProcess)") + + assert_container_exited(worker) + + class test_worker_soft_shutdown_timeout(SuiteOperations): + @pytest.fixture + def default_worker_app(self, default_worker_app: Celery) -> Celery: + app = default_worker_app + app.conf.worker_soft_shutdown_timeout = 10 + return app + + def test_soft_shutdown(self, celery_setup: CeleryTestSetup): + app = celery_setup.app + queue = celery_setup.worker.worker_queue + worker = celery_setup.worker + sig = long_running_task.si(5, verbose=True).set(queue=queue) + res = sig.delay() + + worker.wait_for_log("Starting long running task") + self.kill_worker(worker, WorkerKill.Method.SIGQUIT) + worker.wait_for_log( + f"Initiating Soft Shutdown, terminating in {app.conf.worker_soft_shutdown_timeout} seconds", + timeout=5, + ) + worker.wait_for_log(f"long_running_task[{res.id}] succeeded") + worker.wait_for_log("worker: Cold shutdown (MainProcess)") + + assert_container_exited(worker) + assert res.get(RESULT_TIMEOUT) + + def test_hard_shutdown_from_soft(self, celery_setup: CeleryTestSetup): + queue = celery_setup.worker.worker_queue + worker = celery_setup.worker + sig = long_running_task.si(420, verbose=True).set(queue=queue) + sig.delay() + + worker.wait_for_log("Starting long running task") + self.kill_worker(worker, WorkerKill.Method.SIGQUIT) + self.kill_worker(worker, WorkerKill.Method.SIGQUIT) + worker.wait_for_log("Waiting gracefully for cold shutdown to complete...") + worker.wait_for_log("worker: Cold shutdown (MainProcess)") + self.kill_worker(worker, WorkerKill.Method.SIGQUIT) + + assert_container_exited(worker) + + class test_REMAP_SIGTERM(SuiteOperations): + @pytest.fixture + def default_worker_env(self, default_worker_env: dict) -> dict: + default_worker_env.update({"REMAP_SIGTERM": "SIGQUIT"}) + return default_worker_env + + def test_soft_shutdown(self, celery_setup: CeleryTestSetup): + app = celery_setup.app + queue = celery_setup.worker.worker_queue + worker = celery_setup.worker + sig = long_running_task.si(5, verbose=True).set(queue=queue) + res = sig.delay() + + worker.wait_for_log("Starting long running task") + self.kill_worker(worker, WorkerKill.Method.SIGTERM) + worker.wait_for_log( + f"Initiating Soft Shutdown, terminating in {app.conf.worker_soft_shutdown_timeout} seconds" + ) + worker.wait_for_log(f"long_running_task[{res.id}] 
succeeded") + worker.wait_for_log("worker: Cold shutdown (MainProcess)") + + assert_container_exited(worker) + assert res.get(RESULT_TIMEOUT) + + def test_hard_shutdown_from_soft(self, celery_setup: CeleryTestSetup): + queue = celery_setup.worker.worker_queue + worker = celery_setup.worker + sig = long_running_task.si(420, verbose=True).set(queue=queue) + sig.delay() + + worker.wait_for_log("Starting long running task") + self.kill_worker(worker, WorkerKill.Method.SIGTERM) + self.kill_worker(worker, WorkerKill.Method.SIGTERM) + worker.wait_for_log("Waiting gracefully for cold shutdown to complete...") + worker.wait_for_log("worker: Cold shutdown (MainProcess)", timeout=5) + self.kill_worker(worker, WorkerKill.Method.SIGTERM) + + assert_container_exited(worker) + + class test_reset_visibility_timeout(SuiteOperations): + @pytest.fixture + def default_worker_app(self, default_worker_app: Celery) -> Celery: + app = default_worker_app + app.conf.prefetch_multiplier = 2 + app.conf.worker_concurrency = 10 + app.conf.broker_transport_options = { + "visibility_timeout": 3600, # 1 hour + "polling_interval": 1, + } + return app + + def test_soft_shutdown_reset_visibility_timeout(self, celery_setup: CeleryTestSetup): + if isinstance(celery_setup.broker, RabbitMQTestBroker): + pytest.skip("RabbitMQ does not support visibility timeout") + + app = celery_setup.app + queue = celery_setup.worker.worker_queue + worker = celery_setup.worker + sig = long_running_task.si(15, verbose=True).set(queue=queue) + res = sig.delay() + + worker.wait_for_log("Starting long running task") + self.kill_worker(worker, WorkerKill.Method.SIGQUIT) + worker.wait_for_log( + f"Initiating Soft Shutdown, terminating in {app.conf.worker_soft_shutdown_timeout} seconds" + ) + worker.wait_for_log("worker: Cold shutdown (MainProcess)") + worker.wait_for_log("Restoring 1 unacknowledged message(s)") + assert_container_exited(worker) + worker.restart() + assert res.get(RESULT_TIMEOUT) + + def test_soft_shutdown_reset_visibility_timeout_group_one_finish(self, celery_setup: CeleryTestSetup): + if isinstance(celery_setup.broker, RabbitMQTestBroker): + pytest.skip("RabbitMQ does not support visibility timeout") + + app = celery_setup.app + queue = celery_setup.worker.worker_queue + worker = celery_setup.worker + short_task = long_running_task.si(3, verbose=True).set(queue=queue) + short_task_res = short_task.freeze() + long_task = long_running_task.si(15, verbose=True).set(queue=queue) + long_task_res = long_task.freeze() + sig = group(short_task, long_task) + sig.delay() + + worker.wait_for_log(f"long_running_task[{short_task_res.id}] received") + worker.wait_for_log(f"long_running_task[{long_task_res.id}] received") + self.kill_worker(worker, WorkerKill.Method.SIGQUIT) + worker.wait_for_log( + f"Initiating Soft Shutdown, terminating in {app.conf.worker_soft_shutdown_timeout} seconds" + ) + worker.wait_for_log(f"long_running_task[{short_task_res.id}] succeeded") + worker.wait_for_log("worker: Cold shutdown (MainProcess)") + worker.wait_for_log("Restoring 1 unacknowledged message(s)") + assert_container_exited(worker) + assert short_task_res.get(RESULT_TIMEOUT) + + def test_soft_shutdown_reset_visibility_timeout_group_none_finish(self, celery_setup: CeleryTestSetup): + if isinstance(celery_setup.broker, RabbitMQTestBroker): + pytest.skip("RabbitMQ does not support visibility timeout") + + app = celery_setup.app + queue = celery_setup.worker.worker_queue + worker = celery_setup.worker + short_task = long_running_task.si(15, 
verbose=True).set(queue=queue) + short_task_res = short_task.freeze() + long_task = long_running_task.si(15, verbose=True).set(queue=queue) + long_task_res = long_task.freeze() + sig = group(short_task, long_task) + res = sig.delay() + + worker.wait_for_log(f"long_running_task[{short_task_res.id}] received") + worker.wait_for_log(f"long_running_task[{long_task_res.id}] received") + self.kill_worker(worker, WorkerKill.Method.SIGQUIT) + worker.wait_for_log( + f"Initiating Soft Shutdown, terminating in {app.conf.worker_soft_shutdown_timeout} seconds" + ) + worker.wait_for_log("worker: Cold shutdown (MainProcess)") + worker.wait_for_log("Restoring 2 unacknowledged message(s)") + assert_container_exited(worker) + worker.restart() + assert res.get(RESULT_TIMEOUT) == [True, True] + assert short_task_res.get(RESULT_TIMEOUT) + assert long_task_res.get(RESULT_TIMEOUT) + + class test_REMAP_SIGTERM(SuiteOperations): + @pytest.fixture + def default_worker_env(self, default_worker_env: dict) -> dict: + default_worker_env.update({"REMAP_SIGTERM": "SIGQUIT"}) + return default_worker_env + + def test_soft_shutdown_reset_visibility_timeout(self, celery_setup: CeleryTestSetup): + if isinstance(celery_setup.broker, RabbitMQTestBroker): + pytest.skip("RabbitMQ does not support visibility timeout") + + app = celery_setup.app + queue = celery_setup.worker.worker_queue + worker = celery_setup.worker + sig = long_running_task.si(15, verbose=True).set(queue=queue) + res = sig.delay() + + worker.wait_for_log("Starting long running task") + self.kill_worker(worker, WorkerKill.Method.SIGTERM) + worker.wait_for_log( + f"Initiating Soft Shutdown, terminating in {app.conf.worker_soft_shutdown_timeout} seconds" + ) + worker.wait_for_log("worker: Cold shutdown (MainProcess)") + worker.wait_for_log("Restoring 1 unacknowledged message(s)") + assert_container_exited(worker) + worker.restart() + assert res.get(RESULT_TIMEOUT) + + def test_soft_shutdown_reset_visibility_timeout_group_one_finish( + self, + celery_setup: CeleryTestSetup, + ): + if isinstance(celery_setup.broker, RabbitMQTestBroker): + pytest.skip("RabbitMQ does not support visibility timeout") + + app = celery_setup.app + queue = celery_setup.worker.worker_queue + worker = celery_setup.worker + short_task = long_running_task.si(3, verbose=True).set(queue=queue) + short_task_res = short_task.freeze() + long_task = long_running_task.si(15, verbose=True).set(queue=queue) + long_task_res = long_task.freeze() + sig = group(short_task, long_task) + sig.delay() + + worker.wait_for_log(f"long_running_task[{short_task_res.id}] received") + worker.wait_for_log(f"long_running_task[{long_task_res.id}] received") + self.kill_worker(worker, WorkerKill.Method.SIGTERM) + worker.wait_for_log( + f"Initiating Soft Shutdown, terminating in {app.conf.worker_soft_shutdown_timeout} seconds" + ) + worker.wait_for_log(f"long_running_task[{short_task_res.id}] succeeded") + worker.wait_for_log("worker: Cold shutdown (MainProcess)") + worker.wait_for_log("Restoring 1 unacknowledged message(s)") + assert_container_exited(worker) + assert short_task_res.get(RESULT_TIMEOUT) diff --git a/t/unit/worker/test_consumer.py b/t/unit/worker/test_consumer.py index 3b8cb2a8322..23933050780 100644 --- a/t/unit/worker/test_consumer.py +++ b/t/unit/worker/test_consumer.py @@ -442,6 +442,32 @@ def test_cancel_long_running_tasks_on_connection_loss__warning(self): with pytest.deprecated_call(match=CANCEL_TASKS_BY_DEFAULT): c.on_connection_error_after_connected(Mock()) + 
@pytest.mark.usefixtures('depends_on_current_app') + def test_cancel_all_unacked_requests(self): + c = self.get_consumer() + + mock_request_acks_late_not_acknowledged = Mock(id='1') + mock_request_acks_late_not_acknowledged.task.acks_late = True + mock_request_acks_late_not_acknowledged.acknowledged = False + mock_request_acks_late_acknowledged = Mock(id='2') + mock_request_acks_late_acknowledged.task.acks_late = True + mock_request_acks_late_acknowledged.acknowledged = True + mock_request_acks_early = Mock(id='3') + mock_request_acks_early.task.acks_late = False + mock_request_acks_early.acknowledged = False + + active_requests.add(mock_request_acks_late_not_acknowledged) + active_requests.add(mock_request_acks_late_acknowledged) + active_requests.add(mock_request_acks_early) + + c.cancel_all_unacked_requests() + + mock_request_acks_late_not_acknowledged.cancel.assert_called_once_with(c.pool) + mock_request_acks_late_acknowledged.cancel.assert_not_called() + mock_request_acks_early.cancel.assert_not_called() + + active_requests.clear() + @pytest.mark.parametrize("broker_connection_retry", [True, False]) @pytest.mark.parametrize("broker_connection_retry_on_startup", [None, False]) @pytest.mark.parametrize("first_connection_attempt", [True, False]) diff --git a/t/unit/worker/test_worker.py b/t/unit/worker/test_worker.py index a0fd468e27b..a2c7cdcbee2 100644 --- a/t/unit/worker/test_worker.py +++ b/t/unit/worker/test_worker.py @@ -1193,3 +1193,12 @@ def timers(self): assert isinstance(w.semaphore, LaxBoundedSemaphore) P = w.pool P.start() + + def test_wait_for_soft_shutdown(self): + worker = self.worker + worker.app.conf.worker_soft_shutdown_timeout = 10 + request = Mock(name='task', id='1234213') + state.task_accepted(request) + with patch("celery.worker.worker.sleep") as sleep: + worker.wait_for_soft_shutdown() + sleep.assert_called_with(worker.app.conf.worker_soft_shutdown_timeout) From a8ecf180e0279778067b37cf62ce076d8b2eb204 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 3 Sep 2024 21:13:08 +0300 Subject: [PATCH 0799/1051] Added worker_enable_soft_shutdown_on_idle (#9231) * Added worker_enable_soft_shutdown_on_idle (useful to requeue ETA tasks on shutdown) * Added code coverage --- celery/app/defaults.py | 1 + celery/worker/worker.py | 6 ++++- docs/userguide/configuration.rst | 15 +++++++++++- docs/userguide/workers.rst | 2 ++ t/smoke/tests/test_worker.py | 40 ++++++++++++++++++++++++++++++++ t/unit/worker/test_worker.py | 24 +++++++++++++++++++ 6 files changed, 86 insertions(+), 2 deletions(-) diff --git a/celery/app/defaults.py b/celery/app/defaults.py index 5a6ea5af1d4..34fbe94bcec 100644 --- a/celery/app/defaults.py +++ b/celery/app/defaults.py @@ -310,6 +310,7 @@ def __repr__(self): False, type='bool' ), soft_shutdown_timeout=Option(0.0, type='float'), + enable_soft_shutdown_on_idle=Option(False, type='bool'), concurrency=Option(None, type='int'), consumer=Option('celery.worker.consumer:Consumer', type='string'), direct=Option(False, type='bool', old={'celery_worker_direct'}), diff --git a/celery/worker/worker.py b/celery/worker/worker.py index b08a1d6d1e1..2444012310f 100644 --- a/celery/worker/worker.py +++ b/celery/worker/worker.py @@ -423,8 +423,12 @@ def wait_for_soft_shutdown(self): soft shutdown timeout even if it is set as it makes no sense to wait for the timeout when there are no tasks to process. 
""" - requests = tuple(state.active_requests) app = self.app + requests = tuple(state.active_requests) + + if app.conf.worker_enable_soft_shutdown_on_idle: + requests = True + if app.conf.worker_soft_shutdown_timeout > 0 and requests: log = f"Initiating Soft Shutdown, terminating in {app.conf.worker_soft_shutdown_timeout} seconds" logger.warning(log) diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst index c36d80246a3..eedd3d19d29 100644 --- a/docs/userguide/configuration.rst +++ b/docs/userguide/configuration.rst @@ -3292,12 +3292,25 @@ is initiated and the worker is terminated. This will apply also when the worker initiate :ref:`cold shutdown ` without doing a warm shutdown first. If the value is set to 0.0, the soft shutdown will be practically disabled. Regardless of the value, the soft shutdown -will be disabled if there are no tasks running. +will be disabled if there are no tasks running (unless :setting:`worker_enable_soft_shutdown_on_idle` is enabled). Experiment with this value to find the optimal time for your tasks to finish gracefully before the worker is terminated. Recommended values can be 10, 30, 60 seconds. Too high value can lead to a long waiting time before the worker is terminated and trigger a :sig:`KILL` signal to forcefully terminate the worker by the host system. +.. setting:: worker_enable_soft_shutdown_on_idle + +``worker_enable_soft_shutdown_on_idle`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +.. versionadded:: 5.5 + +Default: False. + +If the :setting:`worker_soft_shutdown_timeout` is set to a value greater than 0.0, the worker will skip +the :ref:`soft shutdown ` anyways if there are no tasks running. This setting will +enable the soft shutdown even if there are no tasks running. + .. _conf-events: Events diff --git a/docs/userguide/workers.rst b/docs/userguide/workers.rst index 29ccb04fe4c..b613f97d50b 100644 --- a/docs/userguide/workers.rst +++ b/docs/userguide/workers.rst @@ -163,6 +163,8 @@ the worker to perform the cold shutdown a little more gracefully. The soft shutdown is disabled by default to maintain backward compatibility with the :ref:`worker-cold-shutdown` behavior. To enable the soft shutdown, set :setting:`worker_soft_shutdown_timeout` to a positive float value. +The soft shutdown will be skipped if there are no tasks running. To force the soft shutdown, *also* enable the +:setting:`worker_enable_soft_shutdown_on_idle` setting. For example, when setting ``worker_soft_shutdown_timeout=3``, the worker will allow 3 seconds for all currently executing tasks to finish before it terminates. 
If the time limit is reached, the worker will initiate a cold shutdown diff --git a/t/smoke/tests/test_worker.py b/t/smoke/tests/test_worker.py index e478a982e3f..6b7892a24d3 100644 --- a/t/smoke/tests/test_worker.py +++ b/t/smoke/tests/test_worker.py @@ -408,3 +408,43 @@ def test_soft_shutdown_reset_visibility_timeout_group_one_finish( worker.wait_for_log("Restoring 1 unacknowledged message(s)") assert_container_exited(worker) assert short_task_res.get(RESULT_TIMEOUT) + + class test_worker_enable_soft_shutdown_on_idle(SuiteOperations): + @pytest.fixture + def default_worker_app(self, default_worker_app: Celery) -> Celery: + app = default_worker_app + app.conf.worker_enable_soft_shutdown_on_idle = True + return app + + def test_soft_shutdown(self, celery_setup: CeleryTestSetup): + app = celery_setup.app + worker = celery_setup.worker + + self.kill_worker(worker, WorkerKill.Method.SIGQUIT) + worker.wait_for_log( + f"Initiating Soft Shutdown, terminating in {app.conf.worker_soft_shutdown_timeout} seconds", + ) + worker.wait_for_log("worker: Cold shutdown (MainProcess)") + + assert_container_exited(worker) + + def test_soft_shutdown_eta(self, celery_setup: CeleryTestSetup): + if isinstance(celery_setup.broker, RabbitMQTestBroker): + pytest.skip("RabbitMQ does not support visibility timeout") + + app = celery_setup.app + queue = celery_setup.worker.worker_queue + worker = celery_setup.worker + sig = long_running_task.si(5, verbose=True).set(queue=queue) + res = sig.apply_async(countdown=app.conf.worker_soft_shutdown_timeout + 5) + + worker.wait_for_log(f"long_running_task[{res.id}] received") + self.kill_worker(worker, WorkerKill.Method.SIGQUIT) + worker.wait_for_log( + f"Initiating Soft Shutdown, terminating in {app.conf.worker_soft_shutdown_timeout} seconds" + ) + worker.wait_for_log("worker: Cold shutdown (MainProcess)") + worker.wait_for_log("Restoring 1 unacknowledged message(s)") + assert_container_exited(worker) + worker.restart() + assert res.get(RESULT_TIMEOUT) diff --git a/t/unit/worker/test_worker.py b/t/unit/worker/test_worker.py index a2c7cdcbee2..63145fd7bd0 100644 --- a/t/unit/worker/test_worker.py +++ b/t/unit/worker/test_worker.py @@ -1202,3 +1202,27 @@ def test_wait_for_soft_shutdown(self): with patch("celery.worker.worker.sleep") as sleep: worker.wait_for_soft_shutdown() sleep.assert_called_with(worker.app.conf.worker_soft_shutdown_timeout) + + def test_wait_for_soft_shutdown_no_tasks(self): + worker = self.worker + worker.app.conf.worker_soft_shutdown_timeout = 10 + worker.app.conf.worker_enable_soft_shutdown_on_idle = True + state.active_requests.clear() + with patch("celery.worker.worker.sleep") as sleep: + worker.wait_for_soft_shutdown() + sleep.assert_called_with(worker.app.conf.worker_soft_shutdown_timeout) + + def test_wait_for_soft_shutdown_no_wait(self): + worker = self.worker + request = Mock(name='task', id='1234213') + state.task_accepted(request) + with patch("celery.worker.worker.sleep") as sleep: + worker.wait_for_soft_shutdown() + sleep.assert_not_called() + + def test_wait_for_soft_shutdown_no_wait_no_tasks(self): + worker = self.worker + worker.app.conf.worker_enable_soft_shutdown_on_idle = True + with patch("celery.worker.worker.sleep") as sleep: + worker.wait_for_soft_shutdown() + sleep.assert_not_called() From ae4a47f784c982fd833eab5bd6f636277a69bd55 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 4 Sep 2024 12:49:08 +0300 Subject: [PATCH 0800/1051] Bump cryptography from 43.0.0 to 43.0.1 
(#9233) Bumps [cryptography](https://github.com/pyca/cryptography) from 43.0.0 to 43.0.1.
- [Changelog](https://github.com/pyca/cryptography/blob/main/CHANGELOG.rst)
- [Commits](https://github.com/pyca/cryptography/compare/43.0.0...43.0.1)
--- updated-dependencies: - dependency-name: cryptography dependency-type: direct:production update-type: version-update:semver-patch ...
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 requirements/extras/auth.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/requirements/extras/auth.txt b/requirements/extras/auth.txt
index ce12e287454..ccb822680ef 100644
--- a/requirements/extras/auth.txt
+++ b/requirements/extras/auth.txt
@@ -1 +1 @@
-cryptography==43.0.0
+cryptography==43.0.1

From 11aef56ee930ddf9b6b7ba7e943bea14276f6892 Mon Sep 17 00:00:00 2001
From: Tomer Nosrati
Date: Wed, 4 Sep 2024 19:08:21 +0300
Subject: [PATCH 0801/1051] Added docs regarding the relevancy of soft shutdown
 and ETA tasks (#9238)
---
 docs/userguide/configuration.rst | 8 ++++++++
 docs/userguide/workers.rst | 8 ++++++++
 2 files changed, 16 insertions(+)
diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst
index eedd3d19d29..23b2974f34a 100644
--- a/docs/userguide/configuration.rst
+++ b/docs/userguide/configuration.rst
@@ -3311,6 +3311,14 @@ If the :setting:`worker_soft_shutdown_timeout` is set to a value greater than 0.
 the :ref:`soft shutdown ` anyway if there are no tasks running. This setting will
 enable the soft shutdown even if there are no tasks running.
+.. tip::
+
+    When the worker receives ETA tasks, but the ETA has not been reached yet, and a shutdown is initiated,
+    the worker will **skip** the soft shutdown and initiate the cold shutdown immediately if there are no
+    tasks running. This may lead to failure in re-queueing the ETA tasks during worker teardown. To mitigate
+    this, enable this configuration to ensure the worker waits regardless, which gives enough time for a
+    graceful shutdown and successful re-queueing of the ETA tasks.
+
 .. _conf-events:
 Events
diff --git a/docs/userguide/workers.rst b/docs/userguide/workers.rst
index b613f97d50b..1f2cef97c83 100644
--- a/docs/userguide/workers.rst
+++ b/docs/userguide/workers.rst
@@ -166,6 +166,14 @@ behavior. To enable the soft shutdown, set :setting:`worker_soft_shutdown_timeou
 The soft shutdown will be skipped if there are no tasks running. To force the soft shutdown, *also* enable the
 :setting:`worker_enable_soft_shutdown_on_idle` setting.
+.. warning::
+
+    If the worker is not running any task but has ETA tasks reserved, the soft shutdown will not be initiated
+    unless the :setting:`worker_enable_soft_shutdown_on_idle` setting is enabled, which may lead to task loss
+    during the cold shutdown. When using ETA tasks, it is recommended to enable the soft shutdown on idle.
+    Experiment to find which :setting:`worker_soft_shutdown_timeout` value works best for your setup to reduce
+    the risk of task loss to a minimum.
+
 For example, when setting ``worker_soft_shutdown_timeout=3``, the worker will allow 3 seconds for all currently
 executing tasks to finish before it terminates. If the time limit is reached, the worker will initiate a cold shutdown
 and cancel all currently executing tasks.
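The two settings touched by the patches above are meant to be used together. As a minimal sketch (the app name and the 10-second value are illustrative assumptions; only the setting names come from the patches), a worker that should re-queue reserved ETA tasks on shutdown could be configured like this:

.. code-block:: python

    from celery import Celery

    app = Celery("proj")

    # Give running tasks up to 10 seconds to finish before the cold shutdown.
    app.conf.worker_soft_shutdown_timeout = 10.0

    # Wait out the soft shutdown even when idle, so reserved ETA tasks can be
    # re-queued (e.g. "Restoring 1 unacknowledged message(s)") before exit.
    app.conf.worker_enable_soft_shutdown_on_idle = True

With a visibility-timeout broker such as Redis or SQS, the extra wait is what gives the worker time to reset the visibility timeout of unacknowledged messages before it exits.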
From 0428222027e1b6f43621f66c4a8547a676f84731 Mon Sep 17 00:00:00 2001 From: Sergio Livi Date: Thu, 5 Sep 2024 15:05:32 +0200 Subject: [PATCH 0802/1051] Show broker_connection_retry_on_startup warning only if it evaluates as False (#9227) Co-authored-by: Asif Saif Uddin --- celery/worker/consumer/consumer.py | 15 ++++++++------- t/unit/worker/test_consumer.py | 10 +++++----- 2 files changed, 13 insertions(+), 12 deletions(-) diff --git a/celery/worker/consumer/consumer.py b/celery/worker/consumer/consumer.py index 551dfd586a7..8241a976021 100644 --- a/celery/worker/consumer/consumer.py +++ b/celery/worker/consumer/consumer.py @@ -505,13 +505,14 @@ def _error_handler(exc, interval, next_step=CONNECTION_RETRY_STEP): # to determine whether connection retries are disabled. retry_disabled = not self.app.conf.broker_connection_retry - warnings.warn( - CPendingDeprecationWarning( - f"The broker_connection_retry configuration setting will no longer determine\n" - f"whether broker connection retries are made during startup in Celery 6.0 and above.\n" - f"If you wish to retain the existing behavior for retrying connections on startup,\n" - f"you should set broker_connection_retry_on_startup to {self.app.conf.broker_connection_retry}.") - ) + if retry_disabled: + warnings.warn( + CPendingDeprecationWarning( + "The broker_connection_retry configuration setting will no longer determine\n" + "whether broker connection retries are made during startup in Celery 6.0 and above.\n" + "If you wish to refrain from retrying connections on startup,\n" + "you should set broker_connection_retry_on_startup to False instead.") + ) else: if self.first_connection_attempt: retry_disabled = not self.app.conf.broker_connection_retry_on_startup diff --git a/t/unit/worker/test_consumer.py b/t/unit/worker/test_consumer.py index 23933050780..a4c8ac6b196 100644 --- a/t/unit/worker/test_consumer.py +++ b/t/unit/worker/test_consumer.py @@ -478,12 +478,12 @@ def test_ensure_connected(self, subtests, broker_connection_retry, broker_connec c.app.conf.broker_connection_retry_on_startup = broker_connection_retry_on_startup c.app.conf.broker_connection_retry = broker_connection_retry - if broker_connection_retry_on_startup is None: - with subtests.test("Deprecation warning when startup is None"): - with pytest.deprecated_call(): - c.ensure_connected(Mock()) - if broker_connection_retry is False: + if broker_connection_retry_on_startup is None: + with subtests.test("Deprecation warning when startup is None"): + with pytest.deprecated_call(): + c.ensure_connected(Mock()) + with subtests.test("Does not retry when connect throws an error and retry is set to false"): conn = Mock() conn.connect.side_effect = ConnectionError() From c822a5a55f9f97f1949b63dd124e8032106bc779 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Fri, 6 Sep 2024 14:47:58 +0300 Subject: [PATCH 0803/1051] Fixed docker-docs CI failure (#9240) --- requirements/extras/sphinxautobuild.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/sphinxautobuild.txt b/requirements/extras/sphinxautobuild.txt index 01ce5dfaf45..6113624e320 100644 --- a/requirements/extras/sphinxautobuild.txt +++ b/requirements/extras/sphinxautobuild.txt @@ -1 +1 @@ -sphinx-autobuild>=2021.3.14 \ No newline at end of file +sphinx-autobuild>=2021.3.14,!=2024.9.3 \ No newline at end of file From 90feae37357d9f87b01f0784c0d28dec23c5eb1f Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sat, 7 Sep 2024 19:17:48 +0300 Subject: [PATCH 0804/1051] Added docker cleanup 
auto-fixture to improve smoke tests stability (#9243) * Added docker cleanup auto-fixture to improve smoke tests stability * Use docker API instead of subprocess --- t/smoke/conftest.py | 51 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 51 insertions(+) diff --git a/t/smoke/conftest.py b/t/smoke/conftest.py index 073821c61b2..a9ddd9e54d6 100644 --- a/t/smoke/conftest.py +++ b/t/smoke/conftest.py @@ -5,6 +5,7 @@ RedisContainer) from pytest_docker_tools import container, fetch +import docker from celery import Celery from t.smoke.operations.task_termination import TaskTermination from t.smoke.operations.worker_kill import WorkerKill @@ -90,3 +91,53 @@ def default_worker_app(default_worker_app: Celery) -> Celery: if app.conf.broker_url and app.conf.broker_url.startswith("sqs"): app.conf.broker_transport_options["region"] = LOCALSTACK_CREDS["AWS_DEFAULT_REGION"] return app + + +@pytest.fixture(scope="module", autouse=True) +def auto_clean_docker_resources(): + """Clean up Docker resources after each test module.""" + # Used for debugging + verbose = False + + def log(message): + if verbose: + print(message) + + def cleanup_docker_resources(): + """Function to clean up Docker containers, networks, and volumes based on labels.""" + docker_client = docker.from_env() + + try: + # Clean up containers with the label 'creator=pytest-docker-tools' + containers = docker_client.containers.list(all=True, filters={"label": "creator=pytest-docker-tools"}) + for con in containers: + con.reload() # Ensure we have the latest status + if con.status != "running": # Only remove non-running containers + log(f"Removing container {con.name}") + con.remove(force=True) + else: + log(f"Skipping running container {con.name}") + + # Clean up networks with names starting with 'pytest-' + networks = docker_client.networks.list(names=["pytest-*"]) + for network in networks: + if not network.containers: # Check if the network is in use + log(f"Removing network {network.name}") + network.remove() + else: + log(f"Skipping network {network.name}, still in use") + + # Clean up volumes with names starting with 'pytest-*' + volumes = docker_client.volumes.list(filters={"name": "pytest-*"}) + for volume in volumes: + if not volume.attrs.get("UsageData", {}).get("RefCount", 0): # Check if volume is not in use + log(f"Removing volume {volume.name}") + volume.remove() + else: + log(f"Skipping volume {volume.name}, still in use") + + except Exception as e: + log(f"Error occurred while cleaning up Docker resources: {e}") + + log("--- Running Docker resource cleanup ---") + cleanup_docker_resources() From 8951306d200e887962cbc121ab5421624a3114c0 Mon Sep 17 00:00:00 2001 From: Zhong Zheng Date: Sun, 8 Sep 2024 21:03:38 +1000 Subject: [PATCH 0805/1051] print is not thread-safe, so should not be used in signal handler (#9222) * print is not thread-safe, so should not be used in signal handler * Moved unit tests to class test_WorkerApp * only writes when fd has file descriptor value * use the original __stdout__ and __stderr__ * sys.__stderr__ is not mutable * no format change for better diff * retain function interface --------- Co-authored-by: Asif Saif Uddin Co-authored-by: Tomer Nosrati --- celery/apps/worker.py | 3 ++- t/unit/worker/test_worker.py | 16 ++++++++++++++++ 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/celery/apps/worker.py b/celery/apps/worker.py index 5ad3db0eaba..435d333eebb 100644 --- a/celery/apps/worker.py +++ b/celery/apps/worker.py @@ -78,7 +78,8 @@ def active_thread_count(): def 
safe_say(msg, f=sys.__stderr__): - print(f'\n{msg}', file=f, flush=True) + if hasattr(f, 'fileno') and f.fileno() is not None: + os.write(f.fileno(), f'\n{msg}\n'.encode()) class Worker(WorkController): diff --git a/t/unit/worker/test_worker.py b/t/unit/worker/test_worker.py index 63145fd7bd0..c14c3c89f55 100644 --- a/t/unit/worker/test_worker.py +++ b/t/unit/worker/test_worker.py @@ -19,6 +19,7 @@ from kombu.utils.uuid import uuid import t.skip +from celery.apps.worker import safe_say from celery.bootsteps import CLOSE, RUN, TERMINATE, StartStopStep from celery.concurrency.base import BasePool from celery.exceptions import (ImproperlyConfigured, InvalidTaskError, TaskRevokedError, WorkerShutdown, @@ -1226,3 +1227,18 @@ def test_wait_for_soft_shutdown_no_wait_no_tasks(self): with patch("celery.worker.worker.sleep") as sleep: worker.wait_for_soft_shutdown() sleep.assert_not_called() + + +class test_WorkerApp: + + def test_safe_say_defaults_to_stderr(self, capfd): + safe_say("hello") + captured = capfd.readouterr() + assert "\nhello\n" == captured.err + assert "" == captured.out + + def test_safe_say_writes_to_std_out(self, capfd): + safe_say("out", sys.stdout) + captured = capfd.readouterr() + assert "\nout\n" == captured.out + assert "" == captured.err From 33c78f0a4367b5f841b810cb70ebb676ca4ce26d Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sun, 8 Sep 2024 16:38:19 +0300 Subject: [PATCH 0806/1051] Prepare for (pre) release: v5.5.0b3 (#9244) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Bump version: 5.5.0b2 → 5.5.0b3 * Added Changelog for v5.5.0b3 --- .bumpversion.cfg | 2 +- Changelog.rst | 118 +++++++++++++++++++++++++++++++++ README.rst | 2 +- celery/__init__.py | 2 +- docs/includes/introduction.txt | 2 +- 5 files changed, 122 insertions(+), 4 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index e9e03aeeeaa..c0fbfd093bc 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 5.5.0b2 +current_version = 5.5.0b3 commit = True tag = True parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P[a-z\d]+)? diff --git a/Changelog.rst b/Changelog.rst index cc417b4a7a0..6f2501d82e3 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -8,6 +8,124 @@ This document contains change notes for bugfix & new features in the main branch & 5.5.x series, please see :ref:`whatsnew-5.5` for an overview of what's new in Celery 5.5. +.. _version-5.5.0b3: + +5.5.0b3 +======= + +:release-date: 2024-09-08 +:release-by: Tomer Nosrati + +Celery v5.5.0 Beta 3 is now available for testing. +Please help us test this version and report any issues. + +Key Highlights +~~~~~~~~~~~~~~ + +Soft Shutdown +------------- + +The soft shutdown is a new mechanism in Celery that sits between the warm shutdown and the cold shutdown. +It sets a time limited "warm shutdown" period, during which the worker will continue to process tasks that are already running. +After the soft shutdown ends, the worker will initiate a graceful cold shutdown, stopping all tasks and exiting. + +The soft shutdown is disabled by default, and can be enabled by setting the new configuration option :setting:`worker_soft_shutdown_timeout`. +If a worker is not running any task when the soft shutdown initiates, it will skip the warm shutdown period and proceed directly to the cold shutdown +unless the new configuration option :setting:`worker_enable_soft_shutdown_on_idle` is set to True. 
This is useful for workers
+that are idle, waiting on ETA tasks to be executed, and that still want to enable the soft shutdown anyway.
+
+The soft shutdown can replace the cold shutdown when using a broker with a visibility timeout mechanism, like :ref:`Redis `
+or :ref:`SQS `, to enable a more graceful cold shutdown procedure, allowing the worker enough time to re-queue tasks that were not
+completed (e.g., ``Restoring 1 unacknowledged message(s)``) by resetting the visibility timeout of the unacknowledged messages just before
+the worker exits completely.
+
+After upgrading to this version, please share your feedback on the new Soft Shutdown mechanism.
+
+Relevant Issues:
+`#9213 `_,
+`#9231 `_,
+`#9238 `_
+
+- New :ref:`documentation ` for each shutdown type.
+- New :setting:`worker_soft_shutdown_timeout` configuration option.
+- New :setting:`worker_enable_soft_shutdown_on_idle` configuration option.
+
+REMAP_SIGTERM
+-------------
+
+The ``REMAP_SIGTERM`` "hidden feature" has been tested, :ref:`documented ` and is now officially supported.
+This feature allows users to remap the SIGTERM signal to SIGQUIT, to initiate a soft or a cold shutdown using :sig:`TERM`
+instead of :sig:`QUIT`.
+
+Previous Pre-release Highlights
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Pydantic Support
+----------------
+
+This release introduces support for Pydantic models in Celery tasks.
+For more info, see the new pydantic example and PR `#9023 `_ by @mathiasertl.
+
+After upgrading to this version, please share your feedback on the new Pydantic support.
+
+Redis Broker Stability Improvements
+-----------------------------------
+The root cause of the Redis broker instability issue has been `identified and resolved `_
+in the v5.4.0 release of Kombu, which should fix the disconnections bug and offer additional improvements.
+
+After upgrading to this version, please share your feedback on the Redis broker stability.
+
+Relevant Issues:
+`#7276 `_,
+`#8091 `_,
+`#8030 `_,
+`#8384 `_
+
+Quorum Queues Initial Support
+-----------------------------
+This release introduces the initial support for Quorum Queues with Celery.
+
+See new configuration options for more details:
+
+- :setting:`task_default_queue_type`
+- :setting:`worker_detect_quorum_queues`
+
+After upgrading to this version, please share your feedback on the Quorum Queues support.
+ +Relevant Issues: +`#6067 `_, +`#9121 `_ + +What's Changed +~~~~~~~~~~~~~~ + +- Added SQS (localstack) broker to canvas smoke tests (#9179) +- Pin elastic-transport to <= latest version 8.15.0 (#9182) +- Update elasticsearch requirement from <=8.14.0 to <=8.15.0 (#9186) +- Improve formatting (#9188) +- Add basic helm chart for celery (#9181) +- Update kafka.rst (#9194) +- Update pytest-order to 1.3.0 (#9198) +- Update mypy to 1.11.2 (#9206) +- All added to routes (#9204) +- Fix typos discovered by codespell (#9212) +- Use tzdata extras with zoneinfo backports (#8286) +- Use `docker compose` in Contributing's doc build section (#9219) +- Failing test for issue #9119 (#9215) +- Fix date_done timezone issue (#8385) +- CI Fixes to smoke tests (#9223) +- Fix: passes current request context when pushing to request_stack (#9208) +- Fix broken link in the Using RabbitMQ docs page (#9226) +- Added Soft Shutdown Mechanism (#9213) +- Added worker_enable_soft_shutdown_on_idle (#9231) +- Bump cryptography from 43.0.0 to 43.0.1 (#9233) +- Added docs regarding the relevancy of soft shutdown and ETA tasks (#9238) +- Show broker_connection_retry_on_startup warning only if it evaluates as False (#9227) +- Fixed docker-docs CI failure (#9240) +- Added docker cleanup auto-fixture to improve smoke tests stability (#9243) +- print is not thread-safe, so should not be used in signal handler (#9222) +- Prepare for (pre) release: v5.5.0b3 (#9244) + .. _version-5.5.0b2: 5.5.0b2 diff --git a/README.rst b/README.rst index e82bfb88dde..94a78e4fc53 100644 --- a/README.rst +++ b/README.rst @@ -2,7 +2,7 @@ |build-status| |coverage| |license| |wheel| |semgrep| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge| -:Version: 5.5.0b2 (immunity) +:Version: 5.5.0b3 (immunity) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ diff --git a/celery/__init__.py b/celery/__init__.py index 5df02aa2def..187dfddb8d2 100644 --- a/celery/__init__.py +++ b/celery/__init__.py @@ -17,7 +17,7 @@ SERIES = 'immunity' -__version__ = '5.5.0b2' +__version__ = '5.5.0b3' __author__ = 'Ask Solem' __contact__ = 'auvipy@gmail.com' __homepage__ = 'https://docs.celeryq.dev/' diff --git a/docs/includes/introduction.txt b/docs/includes/introduction.txt index 6850e0a89f4..cfb8a08c2f7 100644 --- a/docs/includes/introduction.txt +++ b/docs/includes/introduction.txt @@ -1,4 +1,4 @@ -:Version: 5.5.0b2 (immunity) +:Version: 5.5.0b3 (immunity) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ From 21f73b8e8f09d999af411006ebc6126992f5fd9c Mon Sep 17 00:00:00 2001 From: schnee Date: Mon, 9 Sep 2024 22:48:26 +0800 Subject: [PATCH 0807/1051] Correct the error description in exception message when validate soft_time_limit (#9246) * Correct the error description in exception message when validate soft_time_limit * Update celery/app/task.py --------- Co-authored-by: Asif Saif Uddin --- CONTRIBUTORS.txt | 1 + celery/app/task.py | 6 +++--- t/integration/test_tasks.py | 2 +- t/smoke/tests/test_tasks.py | 2 +- t/unit/tasks/test_tasks.py | 2 +- 5 files changed, 7 insertions(+), 6 deletions(-) diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index b651f3ae414..c86f3c1d559 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -301,3 +301,4 @@ Johannes Faigle, 2024/06/18 Giovanni Giampauli, 2024/06/26 Shamil Abdulaev, 2024/08/05 Nikos Atlas, 2024/08/26 +Narasux, 2024/09/09 diff --git 
a/celery/app/task.py b/celery/app/task.py index ed1d6ed854b..951c75824b7 100644 --- a/celery/app/task.py +++ b/celery/app/task.py @@ -543,8 +543,8 @@ def apply_async(self, args=None, kwargs=None, task_id=None, producer=None, TypeError: If not enough arguments are passed, or too many arguments are passed. Note that signature checks may be disabled by specifying ``@task(typing=False)``. - ValueError: If soft_time_limit and time_limit are set, - and soft_time_limit is less than time_limit + ValueError: If soft_time_limit and time_limit both are set + but soft_time_limit is greater than time_limit kombu.exceptions.OperationalError: If a connection to the transport cannot be made, or if the connection is lost. @@ -553,7 +553,7 @@ def apply_async(self, args=None, kwargs=None, task_id=None, producer=None, :meth:`kombu.Producer.publish`. """ if self.soft_time_limit and self.time_limit and self.soft_time_limit > self.time_limit: - raise ValueError('soft_time_limit must be greater than or equal to time_limit') + raise ValueError('soft_time_limit must be less than or equal to time_limit') if self.typing: try: diff --git a/t/integration/test_tasks.py b/t/integration/test_tasks.py index c6fc7476687..76c46fd3f65 100644 --- a/t/integration/test_tasks.py +++ b/t/integration/test_tasks.py @@ -477,7 +477,7 @@ def test_properties(self, celery_session_worker): @flaky def test_soft_time_limit_exceeding_time_limit(self): - with pytest.raises(ValueError, match='soft_time_limit must be greater than or equal to time_limit'): + with pytest.raises(ValueError, match='soft_time_limit must be less than or equal to time_limit'): result = soft_time_limit_must_exceed_time_limit.apply_async() result.get(timeout=5) diff --git a/t/smoke/tests/test_tasks.py b/t/smoke/tests/test_tasks.py index 1878687ecca..4175f0d21cb 100644 --- a/t/smoke/tests/test_tasks.py +++ b/t/smoke/tests/test_tasks.py @@ -140,5 +140,5 @@ def test_soft_time_limit_lower_than_time_limit(self, celery_setup: CeleryTestSet def test_soft_time_limit_must_exceed_time_limit(self, celery_setup: CeleryTestSetup): sig = soft_time_limit_must_exceed_time_limit.s() - with pytest.raises(ValueError, match="soft_time_limit must be greater than or equal to time_limit"): + with pytest.raises(ValueError, match="soft_time_limit must be less than or equal to time_limit"): sig.apply_async(queue=celery_setup.worker.worker_queue) diff --git a/t/unit/tasks/test_tasks.py b/t/unit/tasks/test_tasks.py index 7d84f108de3..f262efc1bc6 100644 --- a/t/unit/tasks/test_tasks.py +++ b/t/unit/tasks/test_tasks.py @@ -1421,7 +1421,7 @@ def yyy(): assert yyy_result.state == 'FAILURE' except ValueError as e: - assert str(e) == 'soft_time_limit must be greater than or equal to time_limit' + assert str(e) == 'soft_time_limit must be less than or equal to time_limit' class test_apply_task(TasksCase): From 95f4bf0b07b4af81aa848b29def4448e90d329ff Mon Sep 17 00:00:00 2001 From: pyup-bot Date: Tue, 10 Sep 2024 08:45:52 +0300 Subject: [PATCH 0808/1051] Update msgpack from 1.0.8 to 1.1.0 --- requirements/extras/msgpack.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/msgpack.txt b/requirements/extras/msgpack.txt index 82308951b89..a9fdf042422 100644 --- a/requirements/extras/msgpack.txt +++ b/requirements/extras/msgpack.txt @@ -1 +1 @@ -msgpack==1.0.8 +msgpack==1.1.0 From 82d7895e3a78fd7682870fb5084ebce582c9f2ba Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20Pacheco?= Date: Tue, 10 Sep 2024 06:38:49 -0400 Subject: [PATCH 0809/1051] fix(utils): _is_ambigious 
-> _is_ambiguous (#9248) internal helper so no public API impact, but we may want to add to changelog --- celery/utils/time.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/celery/utils/time.py b/celery/utils/time.py index 2c14db29d30..014bc39b22d 100644 --- a/celery/utils/time.py +++ b/celery/utils/time.py @@ -321,7 +321,7 @@ def _can_detect_ambiguous(tz: tzinfo) -> bool: return isinstance(tz, ZoneInfo) or hasattr(tz, "is_ambiguous") -def _is_ambigious(dt: datetime, tz: tzinfo) -> bool: +def _is_ambiguous(dt: datetime, tz: tzinfo) -> bool: """Helper function to determine if a timezone is ambiguous using python's dateutil module. Returns False if the timezone cannot detect ambiguity, or if there is no ambiguity, otherwise True. @@ -338,7 +338,7 @@ def make_aware(dt: datetime, tz: tzinfo) -> datetime: """Set timezone for a :class:`~datetime.datetime` object.""" dt = dt.replace(tzinfo=tz) - if _is_ambigious(dt, tz): + if _is_ambiguous(dt, tz): dt = min(dt.replace(fold=0), dt.replace(fold=1)) return dt From 30fcb8adb425cd1d2bdcd06ce26022d9db3f8b08 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 10 Sep 2024 18:02:14 +0300 Subject: [PATCH 0810/1051] Reduced Smoke Tests to min/max supported python (3.8/3.12) (#9252) --- .github/workflows/python-package.yml | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 41e93544f5a..dbba57e9f19 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -133,7 +133,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + python-version: ['3.8', '3.12'] steps: - name: Fetch Docker Images @@ -170,7 +170,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + python-version: ['3.8', '3.12'] steps: - name: Fetch Docker Images @@ -207,7 +207,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + python-version: ['3.8', '3.12'] steps: - name: Fetch Docker Images @@ -244,7 +244,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + python-version: ['3.8', '3.12'] steps: - name: Fetch Docker Images @@ -281,7 +281,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + python-version: ['3.8', '3.12'] steps: - name: Fetch Docker Images @@ -318,7 +318,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + python-version: ['3.8', '3.12'] steps: - name: Fetch Docker Images @@ -355,7 +355,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + python-version: ['3.8', '3.12'] steps: - name: Fetch Docker Images @@ -392,7 +392,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + python-version: ['3.8', '3.12'] steps: - name: Fetch Docker Images @@ -429,7 +429,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + python-version: ['3.8', '3.12'] steps: - name: Fetch Docker Images @@ -466,7 +466,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + python-version: ['3.8', '3.12'] steps: - name: Fetch Docker Images From ed546cde241ed2bf236043f0e904482b1f6fa2df Mon Sep 17 00:00:00 2001 From: "pyup.io bot" 
Date: Tue, 10 Sep 2024 09:48:21 -0700 Subject: [PATCH 0811/1051] Update pytest from 8.3.2 to 8.3.3 (#9253) --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index c21c462d77b..87d5d7c0e26 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,4 +1,4 @@ -pytest==8.3.2 +pytest==8.3.3 pytest-celery[all]>=1.1.1 pytest-rerunfailures==14.0 pytest-subtests==0.13.1 From ce82f907a0a7f753e1c633f737fa75dbdafeb705 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 11 Sep 2024 11:23:23 +0300 Subject: [PATCH 0812/1051] Update elasticsearch requirement from <=8.15.0 to <=8.15.1 (#9255) Updates the requirements on [elasticsearch](https://github.com/elastic/elasticsearch-py) to permit the latest version. - [Release notes](https://github.com/elastic/elasticsearch-py/releases) - [Commits](https://github.com/elastic/elasticsearch-py/compare/0.4.1...v8.15.1) --- updated-dependencies: - dependency-name: elasticsearch dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/extras/elasticsearch.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/elasticsearch.txt b/requirements/extras/elasticsearch.txt index 2717d520ff2..4a02b7374b7 100644 --- a/requirements/extras/elasticsearch.txt +++ b/requirements/extras/elasticsearch.txt @@ -1,2 +1,2 @@ -elasticsearch<=8.15.0 +elasticsearch<=8.15.1 elastic-transport<=8.15.0 From 690d08e6c61a1c02e71542b7d5a19c6cccccef25 Mon Sep 17 00:00:00 2001 From: Francis Charette-Migneault Date: Wed, 11 Sep 2024 11:25:03 -0400 Subject: [PATCH 0813/1051] update mongodb without deprecated `[srv]` extra requirement (#9258) - fixes https://github.com/celery/celery/issues/9254 --- requirements/extras/mongodb.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/mongodb.txt b/requirements/extras/mongodb.txt index 04d59283325..5d7b45c49d9 100644 --- a/requirements/extras/mongodb.txt +++ b/requirements/extras/mongodb.txt @@ -1 +1 @@ -pymongo[srv]>=4.0.2, <4.9 +pymongo>=4.3, <4.9 From bdbfab5ac3b6afcdab877d60e418acfa023fd4fc Mon Sep 17 00:00:00 2001 From: "blacksmith-sh[bot]" <157653362+blacksmith-sh[bot]@users.noreply.github.com> Date: Fri, 13 Sep 2024 10:51:20 +0300 Subject: [PATCH 0814/1051] blacksmith.sh: Migrate workflows to Blacksmith (#9261) * Migrate workflows to Blacksmith * Removed "if: startsWith(matrix.os, ubuntu-)" * Skip "apt-get install" on windows --------- Co-authored-by: blacksmith-sh[bot] <157653362+blacksmith-sh[bot]@users.noreply.github.com> Co-authored-by: Tomer Nosrati --- .github/workflows/codeql-analysis.yml | 2 +- .github/workflows/docker.yml | 12 +++---- .github/workflows/linter.yml | 2 +- .github/workflows/python-package.yml | 50 +++++++++++++-------------- .github/workflows/semgrep.yml | 2 +- 5 files changed, 34 insertions(+), 34 deletions(-) diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index d0b8564bb86..72078f37760 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -24,7 +24,7 @@ on: jobs: analyze: name: Analyze - runs-on: ubuntu-latest + runs-on: blacksmith-4vcpu-ubuntu-2204 permissions: actions: read contents: read diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 380a87c0eff..ba9d6c6ae41 100644 --- 
a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -23,7 +23,7 @@ on: jobs: docker-build: - runs-on: ubuntu-latest + runs-on: blacksmith-4vcpu-ubuntu-2204 timeout-minutes: 60 steps: - uses: actions/checkout@v4 @@ -31,7 +31,7 @@ jobs: run: make docker-build docker-docs: - runs-on: ubuntu-latest + runs-on: blacksmith-4vcpu-ubuntu-2204 timeout-minutes: 5 steps: - uses: actions/checkout@v4 @@ -39,7 +39,7 @@ jobs: run: make docker-docs smoke-tests_dev: - runs-on: ubuntu-latest + runs-on: blacksmith-4vcpu-ubuntu-2204 timeout-minutes: 10 steps: - uses: actions/checkout@v4 @@ -47,7 +47,7 @@ jobs: run: docker build -f t/smoke/workers/docker/dev . smoke-tests_latest: - runs-on: ubuntu-latest + runs-on: blacksmith-4vcpu-ubuntu-2204 timeout-minutes: 10 steps: - uses: actions/checkout@v4 @@ -55,7 +55,7 @@ jobs: run: docker build -f t/smoke/workers/docker/pypi . smoke-tests_pypi: - runs-on: ubuntu-latest + runs-on: blacksmith-4vcpu-ubuntu-2204 timeout-minutes: 10 steps: - uses: actions/checkout@v4 @@ -63,7 +63,7 @@ jobs: run: docker build -f t/smoke/workers/docker/pypi --build-arg CELERY_VERSION="5" . smoke-tests_legacy: - runs-on: ubuntu-latest + runs-on: blacksmith-4vcpu-ubuntu-2204 timeout-minutes: 10 steps: - uses: actions/checkout@v4 diff --git a/.github/workflows/linter.yml b/.github/workflows/linter.yml index 50d911657fc..98a05f2b3a4 100644 --- a/.github/workflows/linter.yml +++ b/.github/workflows/linter.yml @@ -4,7 +4,7 @@ on: [pull_request, workflow_dispatch] jobs: linter: - runs-on: ubuntu-latest + runs-on: blacksmith-4vcpu-ubuntu-2204 steps: - name: Checkout branch diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index dbba57e9f19..b2716578571 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -34,7 +34,7 @@ jobs: fail-fast: false matrix: python-version: ['3.8', '3.9', '3.10', '3.11', '3.12', 'pypy-3.10'] - os: ["ubuntu-latest", "windows-latest"] + os: ["blacksmith-4vcpu-ubuntu-2204", "windows-latest"] exclude: - python-version: '3.9' os: "windows-latest" @@ -47,12 +47,12 @@ jobs: steps: - name: Install apt packages - if: startsWith(matrix.os, 'ubuntu-') + if: startsWith(matrix.os, 'blacksmith-4vcpu-ubuntu') run: | sudo apt-get update && sudo apt-get install -f libcurl4-openssl-dev libssl-dev libgnutls28-dev httping expect libmemcached-dev - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 + uses: useblacksmith/setup-python@v6 with: python-version: ${{ matrix.python-version }} cache: 'pip' @@ -80,7 +80,7 @@ jobs: if: needs.Unit.result == 'success' timeout-minutes: 240 - runs-on: ubuntu-latest + runs-on: blacksmith-4vcpu-ubuntu-2204 strategy: fail-fast: false matrix: @@ -110,7 +110,7 @@ jobs: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 + uses: useblacksmith/setup-python@v6 with: python-version: ${{ matrix.python-version }} cache: 'pip' @@ -129,7 +129,7 @@ jobs: needs: - Integration if: needs.Integration.result == 'success' - runs-on: ubuntu-latest + runs-on: blacksmith-4vcpu-ubuntu-2204 strategy: fail-fast: false matrix: @@ -147,7 +147,7 @@ jobs: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 + uses: useblacksmith/setup-python@v6 with: python-version: ${{ matrix.python-version }} cache: 'pip' @@ -166,7 +166,7 @@ jobs: needs: - Integration if: needs.Integration.result == 'success' - runs-on: ubuntu-latest 
+ runs-on: blacksmith-4vcpu-ubuntu-2204 strategy: fail-fast: false matrix: @@ -184,7 +184,7 @@ jobs: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 + uses: useblacksmith/setup-python@v6 with: python-version: ${{ matrix.python-version }} cache: 'pip' @@ -203,7 +203,7 @@ jobs: needs: - Integration if: needs.Integration.result == 'success' - runs-on: ubuntu-latest + runs-on: blacksmith-4vcpu-ubuntu-2204 strategy: fail-fast: false matrix: @@ -221,7 +221,7 @@ jobs: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 + uses: useblacksmith/setup-python@v6 with: python-version: ${{ matrix.python-version }} cache: 'pip' @@ -240,7 +240,7 @@ jobs: needs: - Smoke-stamping if: needs.Smoke-stamping.result == 'success' - runs-on: ubuntu-latest + runs-on: blacksmith-4vcpu-ubuntu-2204 strategy: fail-fast: false matrix: @@ -258,7 +258,7 @@ jobs: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 + uses: useblacksmith/setup-python@v6 with: python-version: ${{ matrix.python-version }} cache: 'pip' @@ -277,7 +277,7 @@ jobs: needs: - Smoke-stamping if: needs.Smoke-stamping.result == 'success' - runs-on: ubuntu-latest + runs-on: blacksmith-4vcpu-ubuntu-2204 strategy: fail-fast: false matrix: @@ -295,7 +295,7 @@ jobs: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 + uses: useblacksmith/setup-python@v6 with: python-version: ${{ matrix.python-version }} cache: 'pip' @@ -314,7 +314,7 @@ jobs: needs: - Smoke-stamping if: needs.Smoke-stamping.result == 'success' - runs-on: ubuntu-latest + runs-on: blacksmith-4vcpu-ubuntu-2204 strategy: fail-fast: false matrix: @@ -332,7 +332,7 @@ jobs: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 + uses: useblacksmith/setup-python@v6 with: python-version: ${{ matrix.python-version }} cache: 'pip' @@ -351,7 +351,7 @@ jobs: needs: - Smoke-stamping if: needs.Smoke-stamping.result == 'success' - runs-on: ubuntu-latest + runs-on: blacksmith-4vcpu-ubuntu-2204 strategy: fail-fast: false matrix: @@ -369,7 +369,7 @@ jobs: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 + uses: useblacksmith/setup-python@v6 with: python-version: ${{ matrix.python-version }} cache: 'pip' @@ -388,7 +388,7 @@ jobs: needs: - Smoke-control if: needs.Smoke-control.result == 'success' - runs-on: ubuntu-latest + runs-on: blacksmith-4vcpu-ubuntu-2204 strategy: fail-fast: false matrix: @@ -406,7 +406,7 @@ jobs: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 + uses: useblacksmith/setup-python@v6 with: python-version: ${{ matrix.python-version }} cache: 'pip' @@ -425,7 +425,7 @@ jobs: needs: - Smoke-control if: needs.Smoke-control.result == 'success' - runs-on: ubuntu-latest + runs-on: blacksmith-4vcpu-ubuntu-2204 strategy: fail-fast: false matrix: @@ -443,7 +443,7 @@ jobs: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 + uses: useblacksmith/setup-python@v6 with: python-version: ${{ matrix.python-version }} cache: 'pip' @@ -462,7 +462,7 @@ jobs: needs: - Smoke-control if: needs.Smoke-control.result == 'success' - runs-on: ubuntu-latest + runs-on: blacksmith-4vcpu-ubuntu-2204 strategy: fail-fast: false matrix: @@ -480,7 +480,7 @@ 
jobs: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 + uses: useblacksmith/setup-python@v6 with: python-version: ${{ matrix.python-version }} cache: 'pip' diff --git a/.github/workflows/semgrep.yml b/.github/workflows/semgrep.yml index ddb065dbe48..9078d214ff2 100644 --- a/.github/workflows/semgrep.yml +++ b/.github/workflows/semgrep.yml @@ -15,7 +15,7 @@ name: Semgrep jobs: semgrep: name: Scan - runs-on: ubuntu-20.04 + runs-on: blacksmith-4vcpu-ubuntu-2204 env: SEMGREP_APP_TOKEN: ${{ secrets.SEMGREP_APP_TOKEN }} container: From ddc9bac87bff11ad199d2260f1f5dae563da4fd8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20Pacheco?= Date: Fri, 13 Sep 2024 05:17:00 -0400 Subject: [PATCH 0815/1051] Fixes #9119: inject dispatch_uid for retry-wrapped receivers (#9247) * Fixes #9119: inject dispatch_uid for retry-wrapped receivers - edited _make_lookup_key instead of _make_id, doesn't seem to be much of a difference, but I wanted the change as far up the stack as possible so devs can see it sooner - we can potentially also use functools.wraps and `__wrapped__` (i.e., `functools.wraps(fun)(retry_over_time)`, but this is a bit too generic for this type of solution, which may cause other issues * linting --------- Co-authored-by: Asif Saif Uddin Co-authored-by: Omer Katz --- celery/utils/dispatch/signal.py | 4 ++++ t/unit/utils/test_dispatcher.py | 3 --- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/celery/utils/dispatch/signal.py b/celery/utils/dispatch/signal.py index 0cfa6127ed0..ad8047e6bd7 100644 --- a/celery/utils/dispatch/signal.py +++ b/celery/utils/dispatch/signal.py @@ -54,6 +54,9 @@ def _boundmethod_safe_weakref(obj): def _make_lookup_key(receiver, sender, dispatch_uid): if dispatch_uid: return (dispatch_uid, _make_id(sender)) + # Issue #9119 - retry-wrapped functions use the underlying function for dispatch_uid + elif hasattr(receiver, '_dispatch_uid'): + return (receiver._dispatch_uid, _make_id(sender)) else: return (_make_id(receiver), _make_id(sender)) @@ -170,6 +173,7 @@ def on_error(exc, intervals, retries): # it up later with the original func id options['dispatch_uid'] = _make_id(fun) fun = _retry_receiver(fun) + fun._dispatch_uid = options['dispatch_uid'] self._connect_signal(fun, sender, options['weak'], options['dispatch_uid']) diff --git a/t/unit/utils/test_dispatcher.py b/t/unit/utils/test_dispatcher.py index 07ee2216dc9..0de48531af0 100644 --- a/t/unit/utils/test_dispatcher.py +++ b/t/unit/utils/test_dispatcher.py @@ -2,8 +2,6 @@ import sys import time -import pytest - from celery.utils.dispatch import Signal if sys.platform.startswith('java'): @@ -185,7 +183,6 @@ def test_boundmethod(self): garbage_collect() self._testIsClean(a_signal) - @pytest.mark.xfail(reason="Issue #9119") def test_disconnect_retryable_decorator(self): # Regression test for https://github.com/celery/celery/issues/9119 From d4cb536f0c189cee91ebb577c86042fd05d31fd0 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Fri, 13 Sep 2024 14:14:01 +0300 Subject: [PATCH 0816/1051] Run all smoke tests CI jobs together (#9263) --- .github/workflows/python-package.yml | 28 ++++++++++++++-------------- tox.ini | 2 +- 2 files changed, 15 insertions(+), 15 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index b2716578571..5889dc7caf3 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -238,8 +238,8 @@ jobs: Smoke-canvas: needs: - - 
Smoke-stamping - if: needs.Smoke-stamping.result == 'success' + - Integration + if: needs.Integration.result == 'success' runs-on: blacksmith-4vcpu-ubuntu-2204 strategy: fail-fast: false @@ -275,8 +275,8 @@ jobs: Smoke-consumer: needs: - - Smoke-stamping - if: needs.Smoke-stamping.result == 'success' + - Integration + if: needs.Integration.result == 'success' runs-on: blacksmith-4vcpu-ubuntu-2204 strategy: fail-fast: false @@ -312,8 +312,8 @@ jobs: Smoke-control: needs: - - Smoke-stamping - if: needs.Smoke-stamping.result == 'success' + - Integration + if: needs.Integration.result == 'success' runs-on: blacksmith-4vcpu-ubuntu-2204 strategy: fail-fast: false @@ -349,8 +349,8 @@ jobs: Smoke-signals: needs: - - Smoke-stamping - if: needs.Smoke-stamping.result == 'success' + - Integration + if: needs.Integration.result == 'success' runs-on: blacksmith-4vcpu-ubuntu-2204 strategy: fail-fast: false @@ -386,8 +386,8 @@ jobs: Smoke-tasks: needs: - - Smoke-control - if: needs.Smoke-control.result == 'success' + - Integration + if: needs.Integration.result == 'success' runs-on: blacksmith-4vcpu-ubuntu-2204 strategy: fail-fast: false @@ -423,8 +423,8 @@ jobs: Smoke-thread_safe: needs: - - Smoke-control - if: needs.Smoke-control.result == 'success' + - Integration + if: needs.Integration.result == 'success' runs-on: blacksmith-4vcpu-ubuntu-2204 strategy: fail-fast: false @@ -460,8 +460,8 @@ jobs: Smoke-worker: needs: - - Smoke-control - if: needs.Smoke-control.result == 'success' + - Integration + if: needs.Integration.result == 'success' runs-on: blacksmith-4vcpu-ubuntu-2204 strategy: fail-fast: false diff --git a/tox.ini b/tox.ini index d31c7b2932f..2c2f5992891 100644 --- a/tox.ini +++ b/tox.ini @@ -45,7 +45,7 @@ deps= commands = unit: pytest -vv --maxfail=10 --capture=no -v --cov=celery --cov-report=xml --cov-report term {posargs} integration: pytest -xsvv t/integration {posargs} - smoke: pytest -xsvv t/smoke --dist=loadscope --reruns 10 --reruns-delay 60 --rerun-except AssertionError {posargs} + smoke: pytest -xsvv t/smoke --dist=loadscope --reruns 3 --reruns-delay 30 --rerun-except AssertionError {posargs} setenv = PIP_EXTRA_INDEX_URL=https://celery.github.io/celery-wheelhouse/repo/simple/ BOTO_CONFIG = /dev/null From 235368ceb2416fb0484b41d14484622a1c39fc24 Mon Sep 17 00:00:00 2001 From: Kyle Zurawski <39493372+kylez-ithaka@users.noreply.github.com> Date: Fri, 13 Sep 2024 10:10:42 -0400 Subject: [PATCH 0817/1051] Improve documentation on visibility timeout (#9264) Need to set ALL the values (not clear in documentation still) for it to take effect, and conflicts between apps cause issues Per https://github.com/celery/celery/issues/7651 and https://github.com/celery/celery/issues/5935 --- docs/getting-started/backends-and-brokers/redis.rst | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/docs/getting-started/backends-and-brokers/redis.rst b/docs/getting-started/backends-and-brokers/redis.rst index 7b658f5d906..997431b895f 100644 --- a/docs/getting-started/backends-and-brokers/redis.rst +++ b/docs/getting-started/backends-and-brokers/redis.rst @@ -188,8 +188,8 @@ a more distant future, database-backed periodic task might be a better choice. Periodic tasks won't be affected by the visibility timeout, as this is a concept separate from ETA/countdown. -You can increase this timeout by configuring several options -with the same name: +You can increase this timeout by configuring all of the following options +with the same name (required to set all of them): .. 
code-block:: python @@ -199,6 +199,9 @@ with the same name: The value must be an int describing the number of seconds. +Note: If multiple applications are sharing the same Broker, with different settings, the _shortest_ value will be used. +This include if the value is not set, and the default is sent + Soft Shutdown ------------- From 3c14ea8426521d8fcf82a85d1ef0bab0a4a68c98 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sun, 15 Sep 2024 03:14:31 +0300 Subject: [PATCH 0818/1051] Bump pytest-celery to 1.1.2 (#9267) * Bump pytest-celery to 1.1.2 * Revert "Added docker cleanup auto-fixture to improve smoke tests stability (#9243)" This reverts commit 90feae37357d9f87b01f0784c0d28dec23c5eb1f. * Marked xfail for test_prefetch_count_restored with Redis - flaky test * Marked xfail for test_max_prefetch_not_passed_on_broker_restart with Redis - flaky test --- requirements/extras/pytest.txt | 2 +- requirements/test.txt | 2 +- t/smoke/conftest.py | 51 ---------------------------------- t/smoke/tests/test_consumer.py | 8 +++++- t/smoke/workers/docker/dev | 2 +- t/smoke/workers/docker/pypi | 2 +- 6 files changed, 11 insertions(+), 56 deletions(-) diff --git a/requirements/extras/pytest.txt b/requirements/extras/pytest.txt index 63ab64727e2..f77db5bccc3 100644 --- a/requirements/extras/pytest.txt +++ b/requirements/extras/pytest.txt @@ -1 +1 @@ -pytest-celery[all]>=1.1.1 +pytest-celery[all]>=1.1.2 diff --git a/requirements/test.txt b/requirements/test.txt index 87d5d7c0e26..7719f7877db 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,5 +1,5 @@ pytest==8.3.3 -pytest-celery[all]>=1.1.1 +pytest-celery[all]>=1.1.2 pytest-rerunfailures==14.0 pytest-subtests==0.13.1 pytest-timeout==2.3.1 diff --git a/t/smoke/conftest.py b/t/smoke/conftest.py index a9ddd9e54d6..073821c61b2 100644 --- a/t/smoke/conftest.py +++ b/t/smoke/conftest.py @@ -5,7 +5,6 @@ RedisContainer) from pytest_docker_tools import container, fetch -import docker from celery import Celery from t.smoke.operations.task_termination import TaskTermination from t.smoke.operations.worker_kill import WorkerKill @@ -91,53 +90,3 @@ def default_worker_app(default_worker_app: Celery) -> Celery: if app.conf.broker_url and app.conf.broker_url.startswith("sqs"): app.conf.broker_transport_options["region"] = LOCALSTACK_CREDS["AWS_DEFAULT_REGION"] return app - - -@pytest.fixture(scope="module", autouse=True) -def auto_clean_docker_resources(): - """Clean up Docker resources after each test module.""" - # Used for debugging - verbose = False - - def log(message): - if verbose: - print(message) - - def cleanup_docker_resources(): - """Function to clean up Docker containers, networks, and volumes based on labels.""" - docker_client = docker.from_env() - - try: - # Clean up containers with the label 'creator=pytest-docker-tools' - containers = docker_client.containers.list(all=True, filters={"label": "creator=pytest-docker-tools"}) - for con in containers: - con.reload() # Ensure we have the latest status - if con.status != "running": # Only remove non-running containers - log(f"Removing container {con.name}") - con.remove(force=True) - else: - log(f"Skipping running container {con.name}") - - # Clean up networks with names starting with 'pytest-' - networks = docker_client.networks.list(names=["pytest-*"]) - for network in networks: - if not network.containers: # Check if the network is in use - log(f"Removing network {network.name}") - network.remove() - else: - log(f"Skipping network {network.name}, still in use") - - # Clean up volumes 
with names starting with 'pytest-*' - volumes = docker_client.volumes.list(filters={"name": "pytest-*"}) - for volume in volumes: - if not volume.attrs.get("UsageData", {}).get("RefCount", 0): # Check if volume is not in use - log(f"Removing volume {volume.name}") - volume.remove() - else: - log(f"Skipping volume {volume.name}, still in use") - - except Exception as e: - log(f"Error occurred while cleaning up Docker resources: {e}") - - log("--- Running Docker resource cleanup ---") - cleanup_docker_resources() diff --git a/t/smoke/tests/test_consumer.py b/t/smoke/tests/test_consumer.py index 28f67207ab8..0c6e9372d09 100644 --- a/t/smoke/tests/test_consumer.py +++ b/t/smoke/tests/test_consumer.py @@ -1,5 +1,5 @@ import pytest -from pytest_celery import RESULT_TIMEOUT, CeleryTestSetup +from pytest_celery import RESULT_TIMEOUT, CeleryTestSetup, RedisTestBroker from celery import Celery from celery.canvas import chain, group @@ -57,6 +57,9 @@ def test_reducing_prefetch_count(self, celery_setup: CeleryTestSetup, expected_r celery_setup.worker.assert_log_exists(expected_prefetch_restore_message) def test_prefetch_count_restored(self, celery_setup: CeleryTestSetup): + if isinstance(celery_setup.broker, RedisTestBroker): + # When running in debug it works, when running from CLI it sometimes works + pytest.xfail("Test is flaky with Redis broker") expected_running_tasks_count = MAX_PREFETCH * WORKER_PREFETCH_MULTIPLIER sig = group(long_running_task.s(10) for _ in range(expected_running_tasks_count)) sig.apply_async(queue=celery_setup.worker.worker_queue) @@ -95,6 +98,9 @@ def default_worker_app(self, default_worker_app: Celery) -> Celery: return app def test_max_prefetch_not_passed_on_broker_restart(self, celery_setup: CeleryTestSetup): + if isinstance(celery_setup.broker, RedisTestBroker): + # When running in debug it works, when running from CLI it sometimes works + pytest.xfail("Test is flaky with Redis broker") sig = group(long_running_task.s(10) for _ in range(WORKER_CONCURRENCY)) r = sig.apply_async(queue=celery_setup.worker.worker_queue) celery_setup.broker.restart() diff --git a/t/smoke/workers/docker/dev b/t/smoke/workers/docker/dev index d9e5ee82fef..2a8709b6619 100644 --- a/t/smoke/workers/docker/dev +++ b/t/smoke/workers/docker/dev @@ -39,7 +39,7 @@ COPY --chown=test_user:test_user . 
/celery RUN pip install --no-cache-dir --upgrade \ pip \ -e /celery[redis,pymemcache,pydantic,sqs] \ - pytest-celery>=1.1.1 + pytest-celery>=1.1.2 # The workdir must be /app WORKDIR /app diff --git a/t/smoke/workers/docker/pypi b/t/smoke/workers/docker/pypi index a47a2986373..981438e0e04 100644 --- a/t/smoke/workers/docker/pypi +++ b/t/smoke/workers/docker/pypi @@ -38,7 +38,7 @@ EXPOSE 5678 RUN pip install --no-cache-dir --upgrade \ pip \ celery[redis,pymemcache]${CELERY_VERSION:+==$CELERY_VERSION} \ - pytest-celery[sqs]>=1.1.1 \ + pytest-celery[sqs]>=1.1.2 \ pydantic>=2.4 # The workdir must be /app From ddf2ae124ae0b19ee0255ae3f84339a3d72b0d98 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sun, 15 Sep 2024 04:44:53 +0300 Subject: [PATCH 0819/1051] Added missing "app.conf.visibility_timeout" in smoke tests (#9266) --- t/smoke/tests/test_consumer.py | 1 + t/smoke/tests/test_worker.py | 2 ++ 2 files changed, 3 insertions(+) diff --git a/t/smoke/tests/test_consumer.py b/t/smoke/tests/test_consumer.py index 0c6e9372d09..4151613027d 100644 --- a/t/smoke/tests/test_consumer.py +++ b/t/smoke/tests/test_consumer.py @@ -25,6 +25,7 @@ def default_worker_app(default_worker_app: Celery) -> Celery: "visibility_timeout": 1, "polling_interval": 1, } + app.conf.visibility_timeout = 1 return app diff --git a/t/smoke/tests/test_worker.py b/t/smoke/tests/test_worker.py index 6b7892a24d3..420dad97335 100644 --- a/t/smoke/tests/test_worker.py +++ b/t/smoke/tests/test_worker.py @@ -276,6 +276,8 @@ def default_worker_app(self, default_worker_app: Celery) -> Celery: "visibility_timeout": 3600, # 1 hour "polling_interval": 1, } + app.conf.result_backend_transport_options = {'visibility_timeout': 3600} + app.conf.visibility_timeout = 3600 return app def test_soft_shutdown_reset_visibility_timeout(self, celery_setup: CeleryTestSetup): From d994e054d7761f1d7a3634b43c2e03fa4c45e938 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sun, 15 Sep 2024 16:17:16 +0300 Subject: [PATCH 0820/1051] Improved stability with t/smoke/tests/test_consumer.py (#9268) --- t/smoke/tests/test_consumer.py | 9 ++++++--- t/smoke/tests/test_worker.py | 9 ++++++--- 2 files changed, 12 insertions(+), 6 deletions(-) diff --git a/t/smoke/tests/test_consumer.py b/t/smoke/tests/test_consumer.py index 4151613027d..985b71c2edf 100644 --- a/t/smoke/tests/test_consumer.py +++ b/t/smoke/tests/test_consumer.py @@ -15,17 +15,17 @@ def default_worker_app(default_worker_app: Celery) -> Celery: app = default_worker_app app.conf.worker_prefetch_multiplier = WORKER_PREFETCH_MULTIPLIER app.conf.worker_concurrency = WORKER_CONCURRENCY + app.conf.visibility_timeout = 3600 if app.conf.broker_url.startswith("redis"): app.conf.broker_transport_options = { - "visibility_timeout": 1, + "visibility_timeout": app.conf.visibility_timeout, "polling_interval": 1, } if app.conf.result_backend.startswith("redis"): app.conf.result_backend_transport_options = { - "visibility_timeout": 1, + "visibility_timeout": app.conf.visibility_timeout, "polling_interval": 1, } - app.conf.visibility_timeout = 1 return app @@ -81,6 +81,9 @@ def default_worker_app(self, default_worker_app: Celery) -> Celery: return app def test_max_prefetch_passed_on_broker_restart(self, celery_setup: CeleryTestSetup): + if isinstance(celery_setup.broker, RedisTestBroker): + # When running in debug it works, when running from CLI it sometimes works + pytest.xfail("Test is flaky with Redis broker") sig = group(long_running_task.s(420) for _ in range(WORKER_CONCURRENCY)) 
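For context on the fixture changes above: the visibility-timeout patches funnel the transport options through a single ``visibility_timeout`` value so the broker and result backend settings cannot drift apart. A minimal standalone sketch of the same configuration pattern (the Redis URLs are placeholders):

.. code-block:: python

    from celery import Celery

    app = Celery(
        "example",
        broker="redis://localhost:6379/0",
        backend="redis://localhost:6379/1",
    )

    # One source of truth for the visibility timeout, in seconds, reused
    # for both the broker and the result backend transport options.
    app.conf.visibility_timeout = 3600
    app.conf.broker_transport_options = {
        "visibility_timeout": app.conf.visibility_timeout,
        "polling_interval": 1,
    }
    app.conf.result_backend_transport_options = {
        "visibility_timeout": app.conf.visibility_timeout,
        "polling_interval": 1,
    }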
sig.apply_async(queue=celery_setup.worker.worker_queue) celery_setup.broker.restart() diff --git a/t/smoke/tests/test_worker.py b/t/smoke/tests/test_worker.py index 420dad97335..973a72a5fcf 100644 --- a/t/smoke/tests/test_worker.py +++ b/t/smoke/tests/test_worker.py @@ -272,12 +272,15 @@ def default_worker_app(self, default_worker_app: Celery) -> Celery: app = default_worker_app app.conf.prefetch_multiplier = 2 app.conf.worker_concurrency = 10 + app.conf.visibility_timeout = 3600 # 1 hour app.conf.broker_transport_options = { - "visibility_timeout": 3600, # 1 hour + "visibility_timeout": app.conf.visibility_timeout, + "polling_interval": 1, + } + app.conf.result_backend_transport_options = { + "visibility_timeout": app.conf.visibility_timeout, "polling_interval": 1, } - app.conf.result_backend_transport_options = {'visibility_timeout': 3600} - app.conf.visibility_timeout = 3600 return app def test_soft_shutdown_reset_visibility_timeout(self, celery_setup: CeleryTestSetup): From 0a69609281d5434e7d49a2253a7324406ac17599 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sun, 15 Sep 2024 22:02:20 +0300 Subject: [PATCH 0821/1051] Improved Redis container stability in the smoke tests (#9271) --- t/smoke/conftest.py | 27 ++++++++++++++++++++++++++- 1 file changed, 26 insertions(+), 1 deletion(-) diff --git a/t/smoke/conftest.py b/t/smoke/conftest.py index 073821c61b2..c286b4abf2f 100644 --- a/t/smoke/conftest.py +++ b/t/smoke/conftest.py @@ -3,7 +3,7 @@ import pytest from pytest_celery import (LOCALSTACK_CREDS, REDIS_CONTAINER_TIMEOUT, REDIS_ENV, REDIS_IMAGE, REDIS_PORTS, RedisContainer) -from pytest_docker_tools import container, fetch +from pytest_docker_tools import container, fetch, fxtr from celery import Celery from t.smoke.operations.task_termination import TaskTermination @@ -54,6 +54,7 @@ def default_worker_tasks(default_worker_tasks: set) -> set: network="{default_pytest_celery_network.name}", wrapper_class=RedisContainer, timeout=REDIS_CONTAINER_TIMEOUT, + command=fxtr("default_redis_broker_command"), ) @@ -90,3 +91,27 @@ def default_worker_app(default_worker_app: Celery) -> Celery: if app.conf.broker_url and app.conf.broker_url.startswith("sqs"): app.conf.broker_transport_options["region"] = LOCALSTACK_CREDS["AWS_DEFAULT_REGION"] return app + + +# Override the default redis broker container from pytest-celery +default_redis_broker = container( + image="{default_redis_broker_image}", + ports=fxtr("default_redis_broker_ports"), + environment=fxtr("default_redis_broker_env"), + network="{default_pytest_celery_network.name}", + wrapper_class=RedisContainer, + timeout=REDIS_CONTAINER_TIMEOUT, + command=fxtr("default_redis_broker_command"), +) + + +# Override the default redis backend container from pytest-celery +default_redis_backend = container( + image="{default_redis_backend_image}", + ports=fxtr("default_redis_backend_ports"), + environment=fxtr("default_redis_backend_env"), + network="{default_pytest_celery_network.name}", + wrapper_class=RedisContainer, + timeout=REDIS_CONTAINER_TIMEOUT, + command=fxtr("default_redis_backend_command"), +) From c885c9e3dbd3ebeea6d7ae389916856d76c89f80 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 16 Sep 2024 03:44:01 +0300 Subject: [PATCH 0822/1051] Disabled EXHAUST_MEMORY tests in Smoke-tasks (#9272) --- t/smoke/tests/test_tasks.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/t/smoke/tests/test_tasks.py b/t/smoke/tests/test_tasks.py index 4175f0d21cb..2713e15b1c0 100644 --- a/t/smoke/tests/test_tasks.py 
+++ b/t/smoke/tests/test_tasks.py @@ -26,7 +26,8 @@ def default_worker_app(self, default_worker_app: Celery) -> Celery: (TaskTermination.Method.SIGKILL, WorkerLostError), (TaskTermination.Method.SYSTEM_EXIT, WorkerLostError), (TaskTermination.Method.DELAY_TIMEOUT, TimeLimitExceeded), - (TaskTermination.Method.EXHAUST_MEMORY, WorkerLostError), + # Exhausting the memory messes up the CI environment + # (TaskTermination.Method.EXHAUST_MEMORY, WorkerLostError), ], ) def test_child_process_respawn( @@ -86,11 +87,12 @@ def wait_for_two_celery_processes(): "Hard time limit (2s) exceeded for t.smoke.tasks.self_termination_delay_timeout", "TimeLimitExceeded(2,)", ), - ( - TaskTermination.Method.EXHAUST_MEMORY, - "Worker exited prematurely: signal 9 (SIGKILL)", - None, - ), + # Exhausting the memory messes up the CI environment + # ( + # TaskTermination.Method.EXHAUST_MEMORY, + # "Worker exited prematurely: signal 9 (SIGKILL)", + # None, + # ), ], ) def test_terminated_task_logs_correct_error( From 1967d5600ad7679883adec23c00990a9c7d55edf Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 16 Sep 2024 10:59:28 +0300 Subject: [PATCH 0823/1051] Marked xfail for test_reducing_prefetch_count with Redis - flaky test (#9273) --- t/smoke/tests/test_consumer.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/t/smoke/tests/test_consumer.py b/t/smoke/tests/test_consumer.py index 985b71c2edf..bd1f1e14f8a 100644 --- a/t/smoke/tests/test_consumer.py +++ b/t/smoke/tests/test_consumer.py @@ -38,6 +38,9 @@ def default_worker_app(self, default_worker_app: Celery) -> Celery: @pytest.mark.parametrize("expected_running_tasks_count", range(1, WORKER_CONCURRENCY + 1)) def test_reducing_prefetch_count(self, celery_setup: CeleryTestSetup, expected_running_tasks_count: int): + if isinstance(celery_setup.broker, RedisTestBroker): + # When running in debug it works, when running from CLI it sometimes works + pytest.xfail("Test is flaky with Redis broker") sig = group(long_running_task.s(420) for _ in range(expected_running_tasks_count)) sig.apply_async(queue=celery_setup.worker.worker_queue) celery_setup.broker.restart() From e637e1bdfb943324c6298aea8f29be3f4234336f Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 16 Sep 2024 13:14:32 +0300 Subject: [PATCH 0824/1051] Fixed pypy unit tests random failures in the CI (#9275) --- t/unit/backends/test_gcs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/t/unit/backends/test_gcs.py b/t/unit/backends/test_gcs.py index c9ca167c22f..99f8e09f6d2 100644 --- a/t/unit/backends/test_gcs.py +++ b/t/unit/backends/test_gcs.py @@ -145,7 +145,7 @@ def test_mget(self, mock_get, base_path): backend = GCSBackend(app=self.app) mock_get.side_effect = ['value1', 'value2'] result = backend.mget([b'key1', b'key2']) - mock_get.assert_has_calls([call(b'key1'), call(b'key2')]) + mock_get.assert_has_calls([call(b'key1'), call(b'key2')], any_order=True) assert result == ['value1', 'value2'] @patch('celery.backends.gcs.Client') From 4a3f771e41ee085c70d670cb52c5458739753fd0 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 16 Sep 2024 17:10:41 +0300 Subject: [PATCH 0825/1051] Fixed more pypy unit tests random failures in the CI (#9278) --- t/unit/backends/test_gcs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/t/unit/backends/test_gcs.py b/t/unit/backends/test_gcs.py index 99f8e09f6d2..8ebfbc9aa58 100644 --- a/t/unit/backends/test_gcs.py +++ b/t/unit/backends/test_gcs.py @@ -146,7 +146,7 @@ def test_mget(self, mock_get, base_path): 
mock_get.side_effect = ['value1', 'value2'] result = backend.mget([b'key1', b'key2']) mock_get.assert_has_calls([call(b'key1'), call(b'key2')], any_order=True) - assert result == ['value1', 'value2'] + assert sorted(result) == sorted(['value1', 'value2']) @patch('celery.backends.gcs.Client') @patch('celery.backends.gcs.getpid') From c6f0a6c443cd70b25163d788c0acb0f7829293b9 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 16 Sep 2024 19:14:23 +0300 Subject: [PATCH 0826/1051] Fix Redis container from aborting randomly (#9276) * [TMP] removed unit/int tests * sudo sysctl -w vm.overcommit_memory=1 * Use redis.conf for the redis containers in the smoke tests * Changed Smoke-stamping max reruns from 3 to 5 in the CI only * Revert "[TMP] removed unit/int tests" This reverts commit 3376b82660bd2f26791c82f9faa166b52371d743. --- .github/workflows/python-package.yml | 22 +++++++++++++++++++++- t/smoke/conftest.py | 27 ++++++++++++++++++++++++--- t/smoke/redis.conf | 5 +++++ 3 files changed, 50 insertions(+), 4 deletions(-) create mode 100644 t/smoke/redis.conf diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 5889dc7caf3..a06d56b4d57 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -144,6 +144,8 @@ jobs: - name: Install apt packages run: | sudo apt update + sudo apt-get install -y procps # Install procps to enable sysctl + sudo sysctl -w vm.overcommit_memory=1 - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} @@ -181,6 +183,8 @@ jobs: - name: Install apt packages run: | sudo apt update + sudo apt-get install -y procps # Install procps to enable sysctl + sudo sysctl -w vm.overcommit_memory=1 - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} @@ -218,6 +222,8 @@ jobs: - name: Install apt packages run: | sudo apt update + sudo apt-get install -y procps # Install procps to enable sysctl + sudo sysctl -w vm.overcommit_memory=1 - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} @@ -234,7 +240,7 @@ jobs: timeout-minutes: 30 run: > tox --verbose --verbose -e - "${{ matrix.python-version }}-smoke" -- -n auto -k stamping + "${{ matrix.python-version }}-smoke" -- -n auto -k stamping --reruns 5 Smoke-canvas: needs: @@ -255,6 +261,8 @@ jobs: - name: Install apt packages run: | sudo apt update + sudo apt-get install -y procps # Install procps to enable sysctl + sudo sysctl -w vm.overcommit_memory=1 - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} @@ -292,6 +300,8 @@ jobs: - name: Install apt packages run: | sudo apt update + sudo apt-get install -y procps # Install procps to enable sysctl + sudo sysctl -w vm.overcommit_memory=1 - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} @@ -329,6 +339,8 @@ jobs: - name: Install apt packages run: | sudo apt update + sudo apt-get install -y procps # Install procps to enable sysctl + sudo sysctl -w vm.overcommit_memory=1 - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} @@ -366,6 +378,8 @@ jobs: - name: Install apt packages run: | sudo apt update + sudo apt-get install -y procps # Install procps to enable sysctl + sudo sysctl -w vm.overcommit_memory=1 - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} @@ -403,6 +417,8 @@ jobs: - name: Install apt packages run: | sudo apt update + sudo apt-get install -y procps # Install procps to enable sysctl + sudo sysctl -w vm.overcommit_memory=1 
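The ``vm.overcommit_memory=1`` step added across the CI jobs in this patch targets a known Redis failure mode: Redis forks to perform background saves, and under the kernel's default heuristic overcommit that fork can fail with ENOMEM and take the container down. A small, Linux-only sanity check of the host setting (a sketch, not part of the suite):

.. code-block:: python

    from pathlib import Path

    # "1" means the kernel always overcommits, which Redis recommends so
    # that fork() for background saves cannot fail under memory pressure.
    mode = Path("/proc/sys/vm/overcommit_memory").read_text().strip()
    assert mode == "1", f"expected vm.overcommit_memory=1, got {mode!r}"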
- uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} @@ -440,6 +456,8 @@ jobs: - name: Install apt packages run: | sudo apt update + sudo apt-get install -y procps # Install procps to enable sysctl + sudo sysctl -w vm.overcommit_memory=1 - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} @@ -477,6 +495,8 @@ jobs: - name: Install apt packages run: | sudo apt update + sudo apt-get install -y procps # Install procps to enable sysctl + sudo sysctl -w vm.overcommit_memory=1 - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} diff --git a/t/smoke/conftest.py b/t/smoke/conftest.py index c286b4abf2f..4be447d414d 100644 --- a/t/smoke/conftest.py +++ b/t/smoke/conftest.py @@ -46,6 +46,9 @@ def default_worker_tasks(default_worker_tasks: set) -> set: # we use pytest-celery to raise a dedicated Redis container for the smoke tests suite that is configured # to be used by the integration tests tasks. +redis_command = RedisContainer.command() +redis_command.insert(1, "/usr/local/etc/redis/redis.conf") + redis_image = fetch(repository=REDIS_IMAGE) redis_test_container: RedisContainer = container( image="{redis_image.id}", @@ -54,7 +57,13 @@ def default_worker_tasks(default_worker_tasks: set) -> set: network="{default_pytest_celery_network.name}", wrapper_class=RedisContainer, timeout=REDIS_CONTAINER_TIMEOUT, - command=fxtr("default_redis_broker_command"), + command=redis_command, + volumes={ + os.path.abspath("t/smoke/redis.conf"): { + "bind": "/usr/local/etc/redis/redis.conf", + "mode": "ro", # Mount as read-only + } + }, ) @@ -101,7 +110,13 @@ def default_worker_app(default_worker_app: Celery) -> Celery: network="{default_pytest_celery_network.name}", wrapper_class=RedisContainer, timeout=REDIS_CONTAINER_TIMEOUT, - command=fxtr("default_redis_broker_command"), + command=redis_command, + volumes={ + os.path.abspath("t/smoke/redis.conf"): { + "bind": "/usr/local/etc/redis/redis.conf", + "mode": "ro", # Mount as read-only + } + }, ) @@ -113,5 +128,11 @@ def default_worker_app(default_worker_app: Celery) -> Celery: network="{default_pytest_celery_network.name}", wrapper_class=RedisContainer, timeout=REDIS_CONTAINER_TIMEOUT, - command=fxtr("default_redis_backend_command"), + command=redis_command, + volumes={ + os.path.abspath("t/smoke/redis.conf"): { + "bind": "/usr/local/etc/redis/redis.conf", + "mode": "ro", # Mount as read-only + } + }, ) diff --git a/t/smoke/redis.conf b/t/smoke/redis.conf new file mode 100644 index 00000000000..d39f39cf5d8 --- /dev/null +++ b/t/smoke/redis.conf @@ -0,0 +1,5 @@ +bind 0.0.0.0 +protected-mode no +save "" +appendonly no +maxmemory-policy noeviction From eb16afb27ead3225203c76c147cb835b66a47239 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 16 Sep 2024 20:00:48 +0300 Subject: [PATCH 0827/1051] Run Integration & Smoke CI tests together after unit tests passes (#9280) --- .github/workflows/python-package.yml | 40 ++++++++++++++-------------- 1 file changed, 20 insertions(+), 20 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index a06d56b4d57..086040a34b7 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -127,8 +127,8 @@ jobs: Smoke-failover: needs: - - Integration - if: needs.Integration.result == 'success' + - Unit + if: needs.Unit.result == 'success' runs-on: blacksmith-4vcpu-ubuntu-2204 strategy: fail-fast: false @@ -166,8 +166,8 @@ jobs: Smoke-quorum_queues: needs: - - Integration - 
if: needs.Integration.result == 'success' + - Unit + if: needs.Unit.result == 'success' runs-on: blacksmith-4vcpu-ubuntu-2204 strategy: fail-fast: false @@ -205,8 +205,8 @@ jobs: Smoke-stamping: needs: - - Integration - if: needs.Integration.result == 'success' + - Unit + if: needs.Unit.result == 'success' runs-on: blacksmith-4vcpu-ubuntu-2204 strategy: fail-fast: false @@ -244,8 +244,8 @@ jobs: Smoke-canvas: needs: - - Integration - if: needs.Integration.result == 'success' + - Unit + if: needs.Unit.result == 'success' runs-on: blacksmith-4vcpu-ubuntu-2204 strategy: fail-fast: false @@ -283,8 +283,8 @@ jobs: Smoke-consumer: needs: - - Integration - if: needs.Integration.result == 'success' + - Unit + if: needs.Unit.result == 'success' runs-on: blacksmith-4vcpu-ubuntu-2204 strategy: fail-fast: false @@ -322,8 +322,8 @@ jobs: Smoke-control: needs: - - Integration - if: needs.Integration.result == 'success' + - Unit + if: needs.Unit.result == 'success' runs-on: blacksmith-4vcpu-ubuntu-2204 strategy: fail-fast: false @@ -361,8 +361,8 @@ jobs: Smoke-signals: needs: - - Integration - if: needs.Integration.result == 'success' + - Unit + if: needs.Unit.result == 'success' runs-on: blacksmith-4vcpu-ubuntu-2204 strategy: fail-fast: false @@ -400,8 +400,8 @@ jobs: Smoke-tasks: needs: - - Integration - if: needs.Integration.result == 'success' + - Unit + if: needs.Unit.result == 'success' runs-on: blacksmith-4vcpu-ubuntu-2204 strategy: fail-fast: false @@ -439,8 +439,8 @@ jobs: Smoke-thread_safe: needs: - - Integration - if: needs.Integration.result == 'success' + - Unit + if: needs.Unit.result == 'success' runs-on: blacksmith-4vcpu-ubuntu-2204 strategy: fail-fast: false @@ -478,8 +478,8 @@ jobs: Smoke-worker: needs: - - Integration - if: needs.Integration.result == 'success' + - Unit + if: needs.Unit.result == 'success' runs-on: blacksmith-4vcpu-ubuntu-2204 strategy: fail-fast: false From 67f8eeb8c0fac33a67b14415efd95b98eab23339 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 16 Sep 2024 22:10:43 +0300 Subject: [PATCH 0828/1051] Added "loglevel verbose" to Redis containers in smoke tests (#9282) --- t/smoke/redis.conf | 1 + 1 file changed, 1 insertion(+) diff --git a/t/smoke/redis.conf b/t/smoke/redis.conf index d39f39cf5d8..74b528c2558 100644 --- a/t/smoke/redis.conf +++ b/t/smoke/redis.conf @@ -3,3 +3,4 @@ protected-mode no save "" appendonly no maxmemory-policy noeviction +loglevel verbose From 11344d9b9641be8cd2ec5b3ae187379d630d1fab Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 17 Sep 2024 01:45:33 +0300 Subject: [PATCH 0829/1051] Fixed Redis error in the smoke tests: "Possible SECURITY ATTACK detected" (#9284) --- t/smoke/tests/test_canvas.py | 21 +++------------------ t/smoke/tests/test_worker.py | 13 +------------ 2 files changed, 4 insertions(+), 30 deletions(-) diff --git a/t/smoke/tests/test_canvas.py b/t/smoke/tests/test_canvas.py index 3e146adf351..02fbe9334f8 100644 --- a/t/smoke/tests/test_canvas.py +++ b/t/smoke/tests/test_canvas.py @@ -1,23 +1,12 @@ import uuid import pytest -from pytest_celery import (ALL_CELERY_BROKERS, CELERY_LOCALSTACK_BROKER, RESULT_TIMEOUT, CeleryTestBroker, - CeleryTestSetup, _is_vendor_installed) +from pytest_celery import RESULT_TIMEOUT, CeleryTestSetup from celery.canvas import chain, chord, group, signature from t.integration.conftest import get_redis_connection from t.integration.tasks import ExpectedException, add, fail, identity, redis_echo -if _is_vendor_installed("localstack"): - ALL_CELERY_BROKERS.add(CELERY_LOCALSTACK_BROKER) - - 
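The canvas smoke tests touched just below exercise an implicit chord upgrade: when one group is chained into another signature, the first group becomes a chord header. A self-contained sketch of that workflow shape (the broker URL and the ``echo`` task are illustrative):

.. code-block:: python

    from celery import Celery, chain, group

    app = Celery(
        "example",
        broker="redis://localhost:6379/0",
        backend="redis://localhost:6379/1",
    )

    @app.task
    def echo(value):
        return value

    # group1 | group2 is upgraded to a chord (group1 as header, group2 as
    # body), and the chord is then chained into two more tasks.
    group1 = group(echo.si("a"), echo.si("a"))
    group2 = group(echo.si("b"), echo.si("b"))
    workflow = chain(group1 | group2, echo.si("c") | echo.si("d"))
    result = workflow.apply_async()  # needs a running broker and worker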
-@pytest.fixture(params=ALL_CELERY_BROKERS) -def celery_broker(request: pytest.FixtureRequest) -> CeleryTestBroker: # type: ignore - broker: CeleryTestBroker = request.getfixturevalue(request.param) - yield broker - broker.teardown() - class test_signature: def test_sanity(self, celery_setup: CeleryTestSetup): @@ -59,9 +48,7 @@ def test_chain_gets_last_task_id_with_failing_tasks_in_chain(self, celery_setup: identity.si("end").set(queue=queue), ) res = sig.apply_async() - celery_setup.worker.assert_log_does_not_exist( - "ValueError: task_id must not be empty. Got None instead." - ) + celery_setup.worker.assert_log_does_not_exist("ValueError: task_id must not be empty. Got None instead.") with pytest.raises(ExpectedException): res.get(timeout=RESULT_TIMEOUT) @@ -72,9 +59,7 @@ def test_upgrade_to_chord_inside_chains(self, celery_setup: CeleryTestSetup): group1 = group(redis_echo.si("a", redis_key), redis_echo.si("a", redis_key)) group2 = group(redis_echo.si("a", redis_key), redis_echo.si("a", redis_key)) chord1 = group1 | group2 - chain1 = chain( - chord1, (redis_echo.si("a", redis_key) | redis_echo.si("b", redis_key).set(queue=queue)) - ) + chain1 = chain(chord1, (redis_echo.si("a", redis_key) | redis_echo.si("b", redis_key).set(queue=queue))) chain1.apply_async(queue=queue).get(timeout=RESULT_TIMEOUT) redis_connection = get_redis_connection() actual = redis_connection.lrange(redis_key, 0, -1) diff --git a/t/smoke/tests/test_worker.py b/t/smoke/tests/test_worker.py index 973a72a5fcf..35baf66015b 100644 --- a/t/smoke/tests/test_worker.py +++ b/t/smoke/tests/test_worker.py @@ -1,8 +1,7 @@ from time import sleep import pytest -from pytest_celery import (ALL_CELERY_BROKERS, CELERY_LOCALSTACK_BROKER, RESULT_TIMEOUT, CeleryTestBroker, - CeleryTestSetup, CeleryTestWorker, RabbitMQTestBroker, _is_vendor_installed) +from pytest_celery import RESULT_TIMEOUT, CeleryTestSetup, CeleryTestWorker, RabbitMQTestBroker import celery from celery import Celery @@ -10,16 +9,6 @@ from t.smoke.conftest import SuiteOperations, WorkerKill, WorkerRestart from t.smoke.tasks import long_running_task -if _is_vendor_installed("localstack"): - ALL_CELERY_BROKERS.add(CELERY_LOCALSTACK_BROKER) - - -@pytest.fixture(params=ALL_CELERY_BROKERS) -def celery_broker(request: pytest.FixtureRequest) -> CeleryTestBroker: # type: ignore - broker: CeleryTestBroker = request.getfixturevalue(request.param) - yield broker - broker.teardown() - def assert_container_exited(worker: CeleryTestWorker, attempts: int = RESULT_TIMEOUT): """It might take a few moments for the container to exit after the worker is killed.""" From 958299502296726363eb09f12be361554b5b2f57 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 17 Sep 2024 01:50:06 +0300 Subject: [PATCH 0830/1051] Refactored the smoke tests github workflow (#9285) --- .github/workflows/python-package.yml | 429 +++------------------------ 1 file changed, 46 insertions(+), 383 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 086040a34b7..c92ab1ebd69 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -124,393 +124,56 @@ jobs: run: > tox --verbose --verbose -e "${{ matrix.python-version }}-integration-${{ matrix.toxenv }}" -vv - - Smoke-failover: - needs: - - Unit - if: needs.Unit.result == 'success' - runs-on: blacksmith-4vcpu-ubuntu-2204 - strategy: - fail-fast: false - matrix: - python-version: ['3.8', '3.12'] - - steps: - - name: Fetch Docker Images - run: | - docker pull 
redis:latest - docker pull rabbitmq:latest - - - name: Install apt packages - run: | - sudo apt update - sudo apt-get install -y procps # Install procps to enable sysctl - sudo sysctl -w vm.overcommit_memory=1 - - - uses: actions/checkout@v4 - - name: Set up Python ${{ matrix.python-version }} - uses: useblacksmith/setup-python@v6 - with: - python-version: ${{ matrix.python-version }} - cache: 'pip' - cache-dependency-path: '**/setup.py' - - name: Install tox - run: python -m pip install --upgrade pip tox tox-gh-actions - - name: > - Run tox for - "${{ matrix.python-version }}-smoke" - timeout-minutes: 30 - run: > - tox --verbose --verbose -e - "${{ matrix.python-version }}-smoke" -- -n auto -k failover - - Smoke-quorum_queues: - needs: - - Unit - if: needs.Unit.result == 'success' - runs-on: blacksmith-4vcpu-ubuntu-2204 - strategy: - fail-fast: false - matrix: - python-version: ['3.8', '3.12'] - - steps: - - name: Fetch Docker Images - run: | - docker pull redis:latest - docker pull rabbitmq:latest - - - name: Install apt packages - run: | - sudo apt update - sudo apt-get install -y procps # Install procps to enable sysctl - sudo sysctl -w vm.overcommit_memory=1 - - - uses: actions/checkout@v4 - - name: Set up Python ${{ matrix.python-version }} - uses: useblacksmith/setup-python@v6 - with: - python-version: ${{ matrix.python-version }} - cache: 'pip' - cache-dependency-path: '**/setup.py' - - name: Install tox - run: python -m pip install --upgrade pip tox tox-gh-actions - - name: > - Run tox for - "${{ matrix.python-version }}-smoke" - timeout-minutes: 20 - run: > - tox --verbose --verbose -e - "${{ matrix.python-version }}-smoke" -- -n auto -k quorum_queues - - Smoke-stamping: - needs: - - Unit - if: needs.Unit.result == 'success' - runs-on: blacksmith-4vcpu-ubuntu-2204 - strategy: - fail-fast: false - matrix: - python-version: ['3.8', '3.12'] - - steps: - - name: Fetch Docker Images - run: | - docker pull redis:latest - docker pull rabbitmq:latest - - - name: Install apt packages - run: | - sudo apt update - sudo apt-get install -y procps # Install procps to enable sysctl - sudo sysctl -w vm.overcommit_memory=1 - - - uses: actions/checkout@v4 - - name: Set up Python ${{ matrix.python-version }} - uses: useblacksmith/setup-python@v6 - with: - python-version: ${{ matrix.python-version }} - cache: 'pip' - cache-dependency-path: '**/setup.py' - - name: Install tox - run: python -m pip install --upgrade pip tox tox-gh-actions - - name: > - Run tox for - "${{ matrix.python-version }}-smoke" - timeout-minutes: 30 - run: > - tox --verbose --verbose -e - "${{ matrix.python-version }}-smoke" -- -n auto -k stamping --reruns 5 - - Smoke-canvas: - needs: - - Unit - if: needs.Unit.result == 'success' - runs-on: blacksmith-4vcpu-ubuntu-2204 - strategy: - fail-fast: false - matrix: - python-version: ['3.8', '3.12'] - - steps: - - name: Fetch Docker Images - run: | - docker pull redis:latest - docker pull rabbitmq:latest - - - name: Install apt packages - run: | - sudo apt update - sudo apt-get install -y procps # Install procps to enable sysctl - sudo sysctl -w vm.overcommit_memory=1 - - - uses: actions/checkout@v4 - - name: Set up Python ${{ matrix.python-version }} - uses: useblacksmith/setup-python@v6 - with: - python-version: ${{ matrix.python-version }} - cache: 'pip' - cache-dependency-path: '**/setup.py' - - name: Install tox - run: python -m pip install --upgrade pip tox tox-gh-actions - - name: > - Run tox for - "${{ matrix.python-version }}-smoke" - timeout-minutes: 30 - run: > - tox 
--verbose --verbose -e - "${{ matrix.python-version }}-smoke" -- -n auto -k test_canvas.py - - Smoke-consumer: + Smoke: needs: - Unit if: needs.Unit.result == 'success' runs-on: blacksmith-4vcpu-ubuntu-2204 strategy: - fail-fast: false - matrix: - python-version: ['3.8', '3.12'] - - steps: - - name: Fetch Docker Images - run: | - docker pull redis:latest - docker pull rabbitmq:latest - - - name: Install apt packages - run: | - sudo apt update - sudo apt-get install -y procps # Install procps to enable sysctl - sudo sysctl -w vm.overcommit_memory=1 - - - uses: actions/checkout@v4 - - name: Set up Python ${{ matrix.python-version }} - uses: useblacksmith/setup-python@v6 - with: - python-version: ${{ matrix.python-version }} - cache: 'pip' - cache-dependency-path: '**/setup.py' - - name: Install tox - run: python -m pip install --upgrade pip tox tox-gh-actions - - name: > - Run tox for - "${{ matrix.python-version }}-smoke" - timeout-minutes: 30 - run: > - tox --verbose --verbose -e - "${{ matrix.python-version }}-smoke" -- -n auto -k test_consumer.py - - Smoke-control: - needs: - - Unit - if: needs.Unit.result == 'success' - runs-on: blacksmith-4vcpu-ubuntu-2204 - strategy: - fail-fast: false - matrix: - python-version: ['3.8', '3.12'] - - steps: - - name: Fetch Docker Images - run: | - docker pull redis:latest - docker pull rabbitmq:latest - - - name: Install apt packages - run: | - sudo apt update - sudo apt-get install -y procps # Install procps to enable sysctl - sudo sysctl -w vm.overcommit_memory=1 - - - uses: actions/checkout@v4 - - name: Set up Python ${{ matrix.python-version }} - uses: useblacksmith/setup-python@v6 - with: - python-version: ${{ matrix.python-version }} - cache: 'pip' - cache-dependency-path: '**/setup.py' - - name: Install tox - run: python -m pip install --upgrade pip tox tox-gh-actions - - name: > - Run tox for - "${{ matrix.python-version }}-smoke" - timeout-minutes: 30 - run: > - tox --verbose --verbose -e - "${{ matrix.python-version }}-smoke" -- -n auto -k test_control.py - - Smoke-signals: - needs: - - Unit - if: needs.Unit.result == 'success' - runs-on: blacksmith-4vcpu-ubuntu-2204 - strategy: - fail-fast: false - matrix: - python-version: ['3.8', '3.12'] - - steps: - - name: Fetch Docker Images - run: | - docker pull redis:latest - docker pull rabbitmq:latest - - - name: Install apt packages - run: | - sudo apt update - sudo apt-get install -y procps # Install procps to enable sysctl - sudo sysctl -w vm.overcommit_memory=1 - - - uses: actions/checkout@v4 - - name: Set up Python ${{ matrix.python-version }} - uses: useblacksmith/setup-python@v6 - with: - python-version: ${{ matrix.python-version }} - cache: 'pip' - cache-dependency-path: '**/setup.py' - - name: Install tox - run: python -m pip install --upgrade pip tox tox-gh-actions - - name: > - Run tox for - "${{ matrix.python-version }}-smoke" - timeout-minutes: 30 - run: > - tox --verbose --verbose -e - "${{ matrix.python-version }}-smoke" -- -n auto -k test_signals.py - - Smoke-tasks: - needs: - - Unit - if: needs.Unit.result == 'success' - runs-on: blacksmith-4vcpu-ubuntu-2204 - strategy: - fail-fast: false - matrix: - python-version: ['3.8', '3.12'] - - steps: - - name: Fetch Docker Images - run: | - docker pull redis:latest - docker pull rabbitmq:latest - - - name: Install apt packages - run: | - sudo apt update - sudo apt-get install -y procps # Install procps to enable sysctl - sudo sysctl -w vm.overcommit_memory=1 - - - uses: actions/checkout@v4 - - name: Set up Python ${{ matrix.python-version 
}} - uses: useblacksmith/setup-python@v6 - with: - python-version: ${{ matrix.python-version }} - cache: 'pip' - cache-dependency-path: '**/setup.py' - - name: Install tox - run: python -m pip install --upgrade pip tox tox-gh-actions - - name: > - Run tox for - "${{ matrix.python-version }}-smoke" - timeout-minutes: 30 - run: > - tox --verbose --verbose -e - "${{ matrix.python-version }}-smoke" -- -n auto -k test_tasks.py - - Smoke-thread_safe: - needs: - - Unit - if: needs.Unit.result == 'success' - runs-on: blacksmith-4vcpu-ubuntu-2204 - strategy: - fail-fast: false - matrix: - python-version: ['3.8', '3.12'] - - steps: - - name: Fetch Docker Images - run: | - docker pull redis:latest - docker pull rabbitmq:latest - - - name: Install apt packages - run: | - sudo apt update - sudo apt-get install -y procps # Install procps to enable sysctl - sudo sysctl -w vm.overcommit_memory=1 - - - uses: actions/checkout@v4 - - name: Set up Python ${{ matrix.python-version }} - uses: useblacksmith/setup-python@v6 - with: - python-version: ${{ matrix.python-version }} - cache: 'pip' - cache-dependency-path: '**/setup.py' - - name: Install tox - run: python -m pip install --upgrade pip tox tox-gh-actions - - name: > - Run tox for - "${{ matrix.python-version }}-smoke" - timeout-minutes: 30 - run: > - tox --verbose --verbose -e - "${{ matrix.python-version }}-smoke" -- -n auto -k test_thread_safe.py - - Smoke-worker: - needs: - - Unit - if: needs.Unit.result == 'success' - runs-on: blacksmith-4vcpu-ubuntu-2204 - strategy: - fail-fast: false - matrix: - python-version: ['3.8', '3.12'] + fail-fast: false + matrix: + python-version: ['3.8', '3.12'] + test-case: [ + 'failover', + 'quorum_queues', + 'stamping', + 'canvas', + 'consumer', + 'control', + 'signals', + 'tasks', + 'thread_safe', + 'worker' + ] steps: - - name: Fetch Docker Images - run: | - docker pull redis:latest - docker pull rabbitmq:latest - - - name: Install apt packages - run: | - sudo apt update - sudo apt-get install -y procps # Install procps to enable sysctl - sudo sysctl -w vm.overcommit_memory=1 - - - uses: actions/checkout@v4 - - name: Set up Python ${{ matrix.python-version }} - uses: useblacksmith/setup-python@v6 - with: - python-version: ${{ matrix.python-version }} - cache: 'pip' - cache-dependency-path: '**/setup.py' - - name: Install tox - run: python -m pip install --upgrade pip tox tox-gh-actions - - name: > - Run tox for - "${{ matrix.python-version }}-smoke" - timeout-minutes: 30 - run: > - tox --verbose --verbose -e - "${{ matrix.python-version }}-smoke" -- -n auto -k test_worker.py + - name: Fetch Docker Images + run: | + docker pull redis:latest + docker pull rabbitmq:latest + + - name: Install apt packages + run: | + sudo apt update + sudo apt-get install -y procps # Install procps to enable sysctl + sudo sysctl -w vm.overcommit_memory=1 + + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: useblacksmith/setup-python@v6 + with: + python-version: ${{ matrix.python-version }} + cache: 'pip' + cache-dependency-path: '**/setup.py' + + - name: Install tox + run: python -m pip install --upgrade pip tox tox-gh-actions + + - name: Run tox for "${{ matrix.python-version }}-smoke-${{ matrix.test-case }}" + timeout-minutes: 30 + run: | + if [ "${{ matrix.test-case }}" == "stamping" ]; then + tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k ${{ matrix.test-case }} --reruns 5 + else + tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k ${{ 
matrix.test-case }} + fi From f51c2bd8c175f5ea235da684aed870e5884fa941 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 17 Sep 2024 01:52:44 +0300 Subject: [PATCH 0831/1051] Increased --reruns 3->4 in smoke tests (#9286) --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 2c2f5992891..ea3b7d58384 100644 --- a/tox.ini +++ b/tox.ini @@ -45,7 +45,7 @@ deps= commands = unit: pytest -vv --maxfail=10 --capture=no -v --cov=celery --cov-report=xml --cov-report term {posargs} integration: pytest -xsvv t/integration {posargs} - smoke: pytest -xsvv t/smoke --dist=loadscope --reruns 3 --reruns-delay 30 --rerun-except AssertionError {posargs} + smoke: pytest -xsvv t/smoke --dist=loadscope --reruns 4 --reruns-delay 30 --rerun-except AssertionError {posargs} setenv = PIP_EXTRA_INDEX_URL=https://celery.github.io/celery-wheelhouse/repo/simple/ BOTO_CONFIG = /dev/null From 88c66aae00cae36fe8a0a0bf5ecd3d084601c78a Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 17 Sep 2024 14:27:54 +0300 Subject: [PATCH 0832/1051] Improve stability of smoke tests (CI and Local) (#9287) --- .github/workflows/python-package.yml | 8 ++------ tox.ini | 2 +- 2 files changed, 3 insertions(+), 7 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index c92ab1ebd69..18073baff9d 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -170,10 +170,6 @@ jobs: run: python -m pip install --upgrade pip tox tox-gh-actions - name: Run tox for "${{ matrix.python-version }}-smoke-${{ matrix.test-case }}" - timeout-minutes: 30 + timeout-minutes: 60 run: | - if [ "${{ matrix.test-case }}" == "stamping" ]; then - tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k ${{ matrix.test-case }} --reruns 5 - else - tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k ${{ matrix.test-case }} - fi + tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k ${{ matrix.test-case }} diff --git a/tox.ini b/tox.ini index ea3b7d58384..f8315332cf0 100644 --- a/tox.ini +++ b/tox.ini @@ -45,7 +45,7 @@ deps= commands = unit: pytest -vv --maxfail=10 --capture=no -v --cov=celery --cov-report=xml --cov-report term {posargs} integration: pytest -xsvv t/integration {posargs} - smoke: pytest -xsvv t/smoke --dist=loadscope --reruns 4 --reruns-delay 30 --rerun-except AssertionError {posargs} + smoke: pytest -xsvv t/smoke --dist=loadscope --reruns 5 --reruns-delay 60 --rerun-except AssertionError {posargs} setenv = PIP_EXTRA_INDEX_URL=https://celery.github.io/celery-wheelhouse/repo/simple/ BOTO_CONFIG = /dev/null From 889fcd3b7b678bb85a779a810ec728a75c45571a Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 17 Sep 2024 16:34:14 +0300 Subject: [PATCH 0833/1051] Fixed Smoke tests CI "test-case" lables (specific instead of general) (#9288) --- .github/workflows/python-package.yml | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 18073baff9d..0bc68e7fb6b 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -137,13 +137,13 @@ jobs: 'failover', 'quorum_queues', 'stamping', - 'canvas', - 'consumer', - 'control', - 'signals', - 'tasks', - 'thread_safe', - 'worker' + 'test_canvas.py', + 'test_consumer.py', + 'test_control.py', + 'test_signals.py', + 'test_tasks.py', + 'test_thread_safe.py', + 
'test_worker.py' ] steps: From b9624da9bd8a763bc39c5eca4024c015764aaf7b Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 17 Sep 2024 17:24:47 +0300 Subject: [PATCH 0834/1051] Use assert_log_exists instead of wait_for_log in worker smoke tests (#9290) --- t/smoke/tests/test_worker.py | 122 +++++++++++++++++------------------ 1 file changed, 61 insertions(+), 61 deletions(-) diff --git a/t/smoke/tests/test_worker.py b/t/smoke/tests/test_worker.py index 35baf66015b..0a415d8f892 100644 --- a/t/smoke/tests/test_worker.py +++ b/t/smoke/tests/test_worker.py @@ -74,10 +74,10 @@ def test_warm_shutdown(self, celery_setup: CeleryTestSetup): sig = long_running_task.si(5, verbose=True).set(queue=queue) res = sig.delay() - worker.wait_for_log("Starting long running task") + worker.assert_log_exists("Starting long running task") self.kill_worker(worker, WorkerKill.Method.SIGTERM) - worker.wait_for_log("worker: Warm shutdown (MainProcess)") - worker.wait_for_log(f"long_running_task[{res.id}] succeeded") + worker.assert_log_exists("worker: Warm shutdown (MainProcess)") + worker.assert_log_exists(f"long_running_task[{res.id}] succeeded") assert_container_exited(worker) assert res.get(RESULT_TIMEOUT) @@ -88,10 +88,10 @@ def test_multiple_warm_shutdown_does_nothing(self, celery_setup: CeleryTestSetup sig = long_running_task.si(5, verbose=True).set(queue=queue) res = sig.delay() - worker.wait_for_log("Starting long running task") + worker.assert_log_exists("Starting long running task") for _ in range(3): self.kill_worker(worker, WorkerKill.Method.SIGTERM) - worker.wait_for_log(f"long_running_task[{res.id}] succeeded") + worker.assert_log_exists(f"long_running_task[{res.id}] succeeded") assert_container_exited(worker) assert res.get(RESULT_TIMEOUT) @@ -102,9 +102,9 @@ def test_cold_shutdown(self, celery_setup: CeleryTestSetup): sig = long_running_task.si(5, verbose=True).set(queue=queue) res = sig.delay() - worker.wait_for_log("Starting long running task") + worker.assert_log_exists("Starting long running task") self.kill_worker(worker, WorkerKill.Method.SIGQUIT) - worker.wait_for_log("worker: Cold shutdown (MainProcess)") + worker.assert_log_exists("worker: Cold shutdown (MainProcess)") worker.assert_log_does_not_exist(f"long_running_task[{res.id}] succeeded") assert_container_exited(worker) @@ -118,13 +118,13 @@ def test_hard_shutdown_from_warm(self, celery_setup: CeleryTestSetup): sig = long_running_task.si(420, verbose=True).set(queue=queue) sig.delay() - worker.wait_for_log("Starting long running task") + worker.assert_log_exists("Starting long running task") self.kill_worker(worker, WorkerKill.Method.SIGTERM) self.kill_worker(worker, WorkerKill.Method.SIGQUIT) self.kill_worker(worker, WorkerKill.Method.SIGQUIT) - worker.wait_for_log("worker: Warm shutdown (MainProcess)") - worker.wait_for_log("worker: Cold shutdown (MainProcess)") + worker.assert_log_exists("worker: Warm shutdown (MainProcess)") + worker.assert_log_exists("worker: Cold shutdown (MainProcess)") assert_container_exited(worker) @@ -134,11 +134,11 @@ def test_hard_shutdown_from_cold(self, celery_setup: CeleryTestSetup): sig = long_running_task.si(420, verbose=True).set(queue=queue) sig.delay() - worker.wait_for_log("Starting long running task") + worker.assert_log_exists("Starting long running task") self.kill_worker(worker, WorkerKill.Method.SIGQUIT) self.kill_worker(worker, WorkerKill.Method.SIGQUIT) - worker.wait_for_log("worker: Cold shutdown (MainProcess)") + worker.assert_log_exists("worker: Cold shutdown (MainProcess)") 
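The assertions in the shutdown tests above and below hinge on the worker's signal semantics: SIGTERM requests a warm shutdown that waits for in-flight tasks, SIGQUIT requests a cold shutdown, and repeating the signal escalates toward a hard shutdown (some test variants configure the worker so that SIGTERM behaves like SIGQUIT instead). A hedged sketch of driving this by hand, where ``proj`` is a placeholder app module:

.. code-block:: python

    import os
    import signal
    import subprocess
    import time

    # Start a worker, give it time to boot, then request a warm shutdown.
    worker = subprocess.Popen(
        ["celery", "-A", "proj", "worker", "--loglevel=INFO"]
    )
    time.sleep(10)

    os.kill(worker.pid, signal.SIGTERM)    # warm: lets running tasks finish
    # os.kill(worker.pid, signal.SIGQUIT)  # cold: terminates running tasks
    worker.wait(timeout=60)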
assert_container_exited(worker) @@ -154,9 +154,9 @@ def test_cold_shutdown(self, celery_setup: CeleryTestSetup): sig = long_running_task.si(5, verbose=True).set(queue=queue) res = sig.delay() - worker.wait_for_log("Starting long running task") + worker.assert_log_exists("Starting long running task") self.kill_worker(worker, WorkerKill.Method.SIGTERM) - worker.wait_for_log("worker: Cold shutdown (MainProcess)") + worker.assert_log_exists("worker: Cold shutdown (MainProcess)") worker.assert_log_does_not_exist(f"long_running_task[{res.id}] succeeded") assert_container_exited(worker) @@ -167,11 +167,11 @@ def test_hard_shutdown_from_cold(self, celery_setup: CeleryTestSetup): sig = long_running_task.si(420, verbose=True).set(queue=queue) sig.delay() - worker.wait_for_log("Starting long running task") + worker.assert_log_exists("Starting long running task") self.kill_worker(worker, WorkerKill.Method.SIGTERM) self.kill_worker(worker, WorkerKill.Method.SIGTERM) - worker.wait_for_log("worker: Cold shutdown (MainProcess)") + worker.assert_log_exists("worker: Cold shutdown (MainProcess)") assert_container_exited(worker) @@ -189,14 +189,14 @@ def test_soft_shutdown(self, celery_setup: CeleryTestSetup): sig = long_running_task.si(5, verbose=True).set(queue=queue) res = sig.delay() - worker.wait_for_log("Starting long running task") + worker.assert_log_exists("Starting long running task") self.kill_worker(worker, WorkerKill.Method.SIGQUIT) - worker.wait_for_log( + worker.assert_log_exists( f"Initiating Soft Shutdown, terminating in {app.conf.worker_soft_shutdown_timeout} seconds", timeout=5, ) - worker.wait_for_log(f"long_running_task[{res.id}] succeeded") - worker.wait_for_log("worker: Cold shutdown (MainProcess)") + worker.assert_log_exists(f"long_running_task[{res.id}] succeeded") + worker.assert_log_exists("worker: Cold shutdown (MainProcess)") assert_container_exited(worker) assert res.get(RESULT_TIMEOUT) @@ -207,11 +207,11 @@ def test_hard_shutdown_from_soft(self, celery_setup: CeleryTestSetup): sig = long_running_task.si(420, verbose=True).set(queue=queue) sig.delay() - worker.wait_for_log("Starting long running task") + worker.assert_log_exists("Starting long running task") self.kill_worker(worker, WorkerKill.Method.SIGQUIT) self.kill_worker(worker, WorkerKill.Method.SIGQUIT) - worker.wait_for_log("Waiting gracefully for cold shutdown to complete...") - worker.wait_for_log("worker: Cold shutdown (MainProcess)") + worker.assert_log_exists("Waiting gracefully for cold shutdown to complete...") + worker.assert_log_exists("worker: Cold shutdown (MainProcess)") self.kill_worker(worker, WorkerKill.Method.SIGQUIT) assert_container_exited(worker) @@ -229,13 +229,13 @@ def test_soft_shutdown(self, celery_setup: CeleryTestSetup): sig = long_running_task.si(5, verbose=True).set(queue=queue) res = sig.delay() - worker.wait_for_log("Starting long running task") + worker.assert_log_exists("Starting long running task") self.kill_worker(worker, WorkerKill.Method.SIGTERM) - worker.wait_for_log( + worker.assert_log_exists( f"Initiating Soft Shutdown, terminating in {app.conf.worker_soft_shutdown_timeout} seconds" ) - worker.wait_for_log(f"long_running_task[{res.id}] succeeded") - worker.wait_for_log("worker: Cold shutdown (MainProcess)") + worker.assert_log_exists(f"long_running_task[{res.id}] succeeded") + worker.assert_log_exists("worker: Cold shutdown (MainProcess)") assert_container_exited(worker) assert res.get(RESULT_TIMEOUT) @@ -246,11 +246,11 @@ def test_hard_shutdown_from_soft(self, celery_setup: 
CeleryTestSetup): sig = long_running_task.si(420, verbose=True).set(queue=queue) sig.delay() - worker.wait_for_log("Starting long running task") + worker.assert_log_exists("Starting long running task") self.kill_worker(worker, WorkerKill.Method.SIGTERM) self.kill_worker(worker, WorkerKill.Method.SIGTERM) - worker.wait_for_log("Waiting gracefully for cold shutdown to complete...") - worker.wait_for_log("worker: Cold shutdown (MainProcess)", timeout=5) + worker.assert_log_exists("Waiting gracefully for cold shutdown to complete...") + worker.assert_log_exists("worker: Cold shutdown (MainProcess)", timeout=5) self.kill_worker(worker, WorkerKill.Method.SIGTERM) assert_container_exited(worker) @@ -282,13 +282,13 @@ def test_soft_shutdown_reset_visibility_timeout(self, celery_setup: CeleryTestSe sig = long_running_task.si(15, verbose=True).set(queue=queue) res = sig.delay() - worker.wait_for_log("Starting long running task") + worker.assert_log_exists("Starting long running task") self.kill_worker(worker, WorkerKill.Method.SIGQUIT) - worker.wait_for_log( + worker.assert_log_exists( f"Initiating Soft Shutdown, terminating in {app.conf.worker_soft_shutdown_timeout} seconds" ) - worker.wait_for_log("worker: Cold shutdown (MainProcess)") - worker.wait_for_log("Restoring 1 unacknowledged message(s)") + worker.assert_log_exists("worker: Cold shutdown (MainProcess)") + worker.assert_log_exists("Restoring 1 unacknowledged message(s)") assert_container_exited(worker) worker.restart() assert res.get(RESULT_TIMEOUT) @@ -307,15 +307,15 @@ def test_soft_shutdown_reset_visibility_timeout_group_one_finish(self, celery_se sig = group(short_task, long_task) sig.delay() - worker.wait_for_log(f"long_running_task[{short_task_res.id}] received") - worker.wait_for_log(f"long_running_task[{long_task_res.id}] received") + worker.assert_log_exists(f"long_running_task[{short_task_res.id}] received") + worker.assert_log_exists(f"long_running_task[{long_task_res.id}] received") self.kill_worker(worker, WorkerKill.Method.SIGQUIT) - worker.wait_for_log( + worker.assert_log_exists( f"Initiating Soft Shutdown, terminating in {app.conf.worker_soft_shutdown_timeout} seconds" ) - worker.wait_for_log(f"long_running_task[{short_task_res.id}] succeeded") - worker.wait_for_log("worker: Cold shutdown (MainProcess)") - worker.wait_for_log("Restoring 1 unacknowledged message(s)") + worker.assert_log_exists(f"long_running_task[{short_task_res.id}] succeeded") + worker.assert_log_exists("worker: Cold shutdown (MainProcess)") + worker.assert_log_exists("Restoring 1 unacknowledged message(s)") assert_container_exited(worker) assert short_task_res.get(RESULT_TIMEOUT) @@ -333,14 +333,14 @@ def test_soft_shutdown_reset_visibility_timeout_group_none_finish(self, celery_s sig = group(short_task, long_task) res = sig.delay() - worker.wait_for_log(f"long_running_task[{short_task_res.id}] received") - worker.wait_for_log(f"long_running_task[{long_task_res.id}] received") + worker.assert_log_exists(f"long_running_task[{short_task_res.id}] received") + worker.assert_log_exists(f"long_running_task[{long_task_res.id}] received") self.kill_worker(worker, WorkerKill.Method.SIGQUIT) - worker.wait_for_log( + worker.assert_log_exists( f"Initiating Soft Shutdown, terminating in {app.conf.worker_soft_shutdown_timeout} seconds" ) - worker.wait_for_log("worker: Cold shutdown (MainProcess)") - worker.wait_for_log("Restoring 2 unacknowledged message(s)") + worker.assert_log_exists("worker: Cold shutdown (MainProcess)") + worker.assert_log_exists("Restoring 2 
unacknowledged message(s)") assert_container_exited(worker) worker.restart() assert res.get(RESULT_TIMEOUT) == [True, True] @@ -363,13 +363,13 @@ def test_soft_shutdown_reset_visibility_timeout(self, celery_setup: CeleryTestSe sig = long_running_task.si(15, verbose=True).set(queue=queue) res = sig.delay() - worker.wait_for_log("Starting long running task") + worker.assert_log_exists("Starting long running task") self.kill_worker(worker, WorkerKill.Method.SIGTERM) - worker.wait_for_log( + worker.assert_log_exists( f"Initiating Soft Shutdown, terminating in {app.conf.worker_soft_shutdown_timeout} seconds" ) - worker.wait_for_log("worker: Cold shutdown (MainProcess)") - worker.wait_for_log("Restoring 1 unacknowledged message(s)") + worker.assert_log_exists("worker: Cold shutdown (MainProcess)") + worker.assert_log_exists("Restoring 1 unacknowledged message(s)") assert_container_exited(worker) worker.restart() assert res.get(RESULT_TIMEOUT) @@ -391,15 +391,15 @@ def test_soft_shutdown_reset_visibility_timeout_group_one_finish( sig = group(short_task, long_task) sig.delay() - worker.wait_for_log(f"long_running_task[{short_task_res.id}] received") - worker.wait_for_log(f"long_running_task[{long_task_res.id}] received") + worker.assert_log_exists(f"long_running_task[{short_task_res.id}] received") + worker.assert_log_exists(f"long_running_task[{long_task_res.id}] received") self.kill_worker(worker, WorkerKill.Method.SIGTERM) - worker.wait_for_log( + worker.assert_log_exists( f"Initiating Soft Shutdown, terminating in {app.conf.worker_soft_shutdown_timeout} seconds" ) - worker.wait_for_log(f"long_running_task[{short_task_res.id}] succeeded") - worker.wait_for_log("worker: Cold shutdown (MainProcess)") - worker.wait_for_log("Restoring 1 unacknowledged message(s)") + worker.assert_log_exists(f"long_running_task[{short_task_res.id}] succeeded") + worker.assert_log_exists("worker: Cold shutdown (MainProcess)") + worker.assert_log_exists("Restoring 1 unacknowledged message(s)") assert_container_exited(worker) assert short_task_res.get(RESULT_TIMEOUT) @@ -415,10 +415,10 @@ def test_soft_shutdown(self, celery_setup: CeleryTestSetup): worker = celery_setup.worker self.kill_worker(worker, WorkerKill.Method.SIGQUIT) - worker.wait_for_log( + worker.assert_log_exists( f"Initiating Soft Shutdown, terminating in {app.conf.worker_soft_shutdown_timeout} seconds", ) - worker.wait_for_log("worker: Cold shutdown (MainProcess)") + worker.assert_log_exists("worker: Cold shutdown (MainProcess)") assert_container_exited(worker) @@ -432,13 +432,13 @@ def test_soft_shutdown_eta(self, celery_setup: CeleryTestSetup): sig = long_running_task.si(5, verbose=True).set(queue=queue) res = sig.apply_async(countdown=app.conf.worker_soft_shutdown_timeout + 5) - worker.wait_for_log(f"long_running_task[{res.id}] received") + worker.assert_log_exists(f"long_running_task[{res.id}] received") self.kill_worker(worker, WorkerKill.Method.SIGQUIT) - worker.wait_for_log( + worker.assert_log_exists( f"Initiating Soft Shutdown, terminating in {app.conf.worker_soft_shutdown_timeout} seconds" ) - worker.wait_for_log("worker: Cold shutdown (MainProcess)") - worker.wait_for_log("Restoring 1 unacknowledged message(s)") + worker.assert_log_exists("worker: Cold shutdown (MainProcess)") + worker.assert_log_exists("Restoring 1 unacknowledged message(s)") assert_container_exited(worker) worker.restart() assert res.get(RESULT_TIMEOUT) From 06ef4421daf3eb7b20d417d09c31b702814311af Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 17 Sep 2024 18:14:48 
+0300 Subject: [PATCH 0835/1051] Optimized t/smoke/tests/test_worker.py (#9291) --- t/smoke/tests/test_worker.py | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/t/smoke/tests/test_worker.py b/t/smoke/tests/test_worker.py index 0a415d8f892..2165f4296af 100644 --- a/t/smoke/tests/test_worker.py +++ b/t/smoke/tests/test_worker.py @@ -1,7 +1,7 @@ from time import sleep import pytest -from pytest_celery import RESULT_TIMEOUT, CeleryTestSetup, CeleryTestWorker, RabbitMQTestBroker +from pytest_celery import CeleryTestSetup, CeleryTestWorker, RabbitMQTestBroker import celery from celery import Celery @@ -9,6 +9,8 @@ from t.smoke.conftest import SuiteOperations, WorkerKill, WorkerRestart from t.smoke.tasks import long_running_task +RESULT_TIMEOUT = 30 + def assert_container_exited(worker: CeleryTestWorker, attempts: int = RESULT_TIMEOUT): """It might take a few moments for the container to exit after the worker is killed.""" @@ -77,7 +79,6 @@ def test_warm_shutdown(self, celery_setup: CeleryTestSetup): worker.assert_log_exists("Starting long running task") self.kill_worker(worker, WorkerKill.Method.SIGTERM) worker.assert_log_exists("worker: Warm shutdown (MainProcess)") - worker.assert_log_exists(f"long_running_task[{res.id}] succeeded") assert_container_exited(worker) assert res.get(RESULT_TIMEOUT) @@ -91,7 +92,6 @@ def test_multiple_warm_shutdown_does_nothing(self, celery_setup: CeleryTestSetup worker.assert_log_exists("Starting long running task") for _ in range(3): self.kill_worker(worker, WorkerKill.Method.SIGTERM) - worker.assert_log_exists(f"long_running_task[{res.id}] succeeded") assert_container_exited(worker) assert res.get(RESULT_TIMEOUT) @@ -105,7 +105,7 @@ def test_cold_shutdown(self, celery_setup: CeleryTestSetup): worker.assert_log_exists("Starting long running task") self.kill_worker(worker, WorkerKill.Method.SIGQUIT) worker.assert_log_exists("worker: Cold shutdown (MainProcess)") - worker.assert_log_does_not_exist(f"long_running_task[{res.id}] succeeded") + worker.assert_log_does_not_exist(f"long_running_task[{res.id}] succeeded", timeout=10) assert_container_exited(worker) @@ -157,7 +157,7 @@ def test_cold_shutdown(self, celery_setup: CeleryTestSetup): worker.assert_log_exists("Starting long running task") self.kill_worker(worker, WorkerKill.Method.SIGTERM) worker.assert_log_exists("worker: Cold shutdown (MainProcess)") - worker.assert_log_does_not_exist(f"long_running_task[{res.id}] succeeded") + worker.assert_log_does_not_exist(f"long_running_task[{res.id}] succeeded", timeout=10) assert_container_exited(worker) @@ -195,7 +195,6 @@ def test_soft_shutdown(self, celery_setup: CeleryTestSetup): f"Initiating Soft Shutdown, terminating in {app.conf.worker_soft_shutdown_timeout} seconds", timeout=5, ) - worker.assert_log_exists(f"long_running_task[{res.id}] succeeded") worker.assert_log_exists("worker: Cold shutdown (MainProcess)") assert_container_exited(worker) @@ -234,7 +233,6 @@ def test_soft_shutdown(self, celery_setup: CeleryTestSetup): worker.assert_log_exists( f"Initiating Soft Shutdown, terminating in {app.conf.worker_soft_shutdown_timeout} seconds" ) - worker.assert_log_exists(f"long_running_task[{res.id}] succeeded") worker.assert_log_exists("worker: Cold shutdown (MainProcess)") assert_container_exited(worker) @@ -313,7 +311,6 @@ def test_soft_shutdown_reset_visibility_timeout_group_one_finish(self, celery_se worker.assert_log_exists( f"Initiating Soft Shutdown, terminating in {app.conf.worker_soft_shutdown_timeout} seconds" ) - 
worker.assert_log_exists(f"long_running_task[{short_task_res.id}] succeeded") worker.assert_log_exists("worker: Cold shutdown (MainProcess)") worker.assert_log_exists("Restoring 1 unacknowledged message(s)") assert_container_exited(worker) @@ -397,7 +394,6 @@ def test_soft_shutdown_reset_visibility_timeout_group_one_finish( worker.assert_log_exists( f"Initiating Soft Shutdown, terminating in {app.conf.worker_soft_shutdown_timeout} seconds" ) - worker.assert_log_exists(f"long_running_task[{short_task_res.id}] succeeded") worker.assert_log_exists("worker: Cold shutdown (MainProcess)") worker.assert_log_exists("Restoring 1 unacknowledged message(s)") assert_container_exited(worker) From ec80bb8e3d3b903ed82bc893decf4f9ab917359b Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 17 Sep 2024 20:56:47 +0300 Subject: [PATCH 0836/1051] Enable smoke tests dockers check before each test starts (#9292) * Enable smoke tests dockers check before each test starts * Added "from __future__ import annotations" * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- t/smoke/conftest.py | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/t/smoke/conftest.py b/t/smoke/conftest.py index 4be447d414d..80bc2b9ac11 100644 --- a/t/smoke/conftest.py +++ b/t/smoke/conftest.py @@ -1,8 +1,10 @@ +from __future__ import annotations + import os import pytest from pytest_celery import (LOCALSTACK_CREDS, REDIS_CONTAINER_TIMEOUT, REDIS_ENV, REDIS_IMAGE, REDIS_PORTS, - RedisContainer) + CeleryTestSetup, RedisContainer) from pytest_docker_tools import container, fetch, fxtr from celery import Celery @@ -15,6 +17,21 @@ from t.smoke.workers.other import * # noqa +class SmokeTestSetup(CeleryTestSetup): + def ready(self, *args, **kwargs) -> bool: + # Force false, false, true + return super().ready( + ping=False, + control=False, + docker=True, + ) + + +@pytest.fixture +def celery_setup_cls() -> type[CeleryTestSetup]: # type: ignore + return SmokeTestSetup + + class SuiteOperations( TaskTermination, WorkerKill, From 900103c49b410df3b14d224a77329e4b0e494caa Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 17 Sep 2024 22:13:17 +0300 Subject: [PATCH 0837/1051] Relaxed smoke tests flaky tests mechanism (#9293) --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index f8315332cf0..54ebced29c9 100644 --- a/tox.ini +++ b/tox.ini @@ -45,7 +45,7 @@ deps= commands = unit: pytest -vv --maxfail=10 --capture=no -v --cov=celery --cov-report=xml --cov-report term {posargs} integration: pytest -xsvv t/integration {posargs} - smoke: pytest -xsvv t/smoke --dist=loadscope --reruns 5 --reruns-delay 60 --rerun-except AssertionError {posargs} + smoke: pytest -xsvv t/smoke --dist=loadscope --reruns 5 --reruns-delay 10 {posargs} setenv = PIP_EXTRA_INDEX_URL=https://celery.github.io/celery-wheelhouse/repo/simple/ BOTO_CONFIG = /dev/null From 89ff576eb9ae4efceeb169b6e956fdb57794f331 Mon Sep 17 00:00:00 2001 From: bkienker Date: Tue, 17 Sep 2024 18:13:14 -0400 Subject: [PATCH 0838/1051] Updated quorum queue detection to handle multiple broker instances (#9294) Co-authored-by: Ben Kienker --- celery/worker/consumer/tasks.py | 2 +- t/unit/worker/test_consumer.py | 6 +++++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/celery/worker/consumer/tasks.py b/celery/worker/consumer/tasks.py index 
12f9b6a33b5..eca03e14298 100644 --- a/celery/worker/consumer/tasks.py +++ b/celery/worker/consumer/tasks.py @@ -107,7 +107,7 @@ def detect_quorum_queues(self, c) -> tuple[bool, str]: tuple[bool, str]: A tuple containing a boolean indicating if any of the queues are quorum queues and the name of the first quorum queue found or an empty string if no quorum queues were found. """ - is_rabbitmq_broker = c.app.conf.broker_url.startswith(("amqp", "pyamqp")) + is_rabbitmq_broker = c.connection.transport.driver_type == 'amqp' if is_rabbitmq_broker: queues = c.app.amqp.queues diff --git a/t/unit/worker/test_consumer.py b/t/unit/worker/test_consumer.py index a4c8ac6b196..e38e1d952b7 100644 --- a/t/unit/worker/test_consumer.py +++ b/t/unit/worker/test_consumer.py @@ -590,6 +590,7 @@ def test_stop_already_stopped(self): def test_detect_quorum_queues_positive(self): c = self.c + self.c.connection.transport.driver_type = 'amqp' c.app.amqp.queues = {"celery": Mock(queue_arguments={"x-queue-type": "quorum"})} tasks = Tasks(c) result, name = tasks.detect_quorum_queues(c) @@ -598,6 +599,7 @@ def test_detect_quorum_queues_positive(self): def test_detect_quorum_queues_negative(self): c = self.c + self.c.connection.transport.driver_type = 'amqp' c.app.amqp.queues = {"celery": Mock(queue_arguments=None)} tasks = Tasks(c) result, name = tasks.detect_quorum_queues(c) @@ -606,7 +608,7 @@ def test_detect_quorum_queues_negative(self): def test_detect_quorum_queues_not_rabbitmq(self): c = self.c - c.app.conf.broker_url = "redis://" + self.c.connection.transport.driver_type = 'redis' tasks = Tasks(c) result, name = tasks.detect_quorum_queues(c) assert not result @@ -626,12 +628,14 @@ def test_qos_global_worker_detect_quorum_queues_true_no_quorum_queues(self): def test_qos_global_worker_detect_quorum_queues_true_with_quorum_queues(self): c = self.c + self.c.connection.transport.driver_type = 'amqp' c.app.amqp.queues = {"celery": Mock(queue_arguments={"x-queue-type": "quorum"})} tasks = Tasks(c) assert tasks.qos_global(c) is False def test_qos_global_eta_warning(self): c = self.c + self.c.connection.transport.driver_type = 'amqp' c.app.amqp.queues = {"celery": Mock(queue_arguments={"x-queue-type": "quorum"})} tasks = Tasks(c) with pytest.warns(CeleryWarning, match=ETA_TASKS_NO_GLOBAL_QOS_WARNING % "celery"): From 674656e8232eef12b0b7f02cfcb03e47244ffca7 Mon Sep 17 00:00:00 2001 From: Marc Bresson <50196352+MarcBresson@users.noreply.github.com> Date: Wed, 18 Sep 2024 14:18:26 +0200 Subject: [PATCH 0839/1051] Non-lazy table creation for database backend (#9228) * ENH: add option to create tables at startup when using a db for backend * DOC: update documentation to reflect addition of database_create_tables_at_setup * REL: add Marc Bresson to the list of contributors * FIX: move table creation after the check for the presence of database url * TST: do not create tables when passing custom schema as the databases with custom names have not been created * ENH: remove SessionManager from default arguments, have it as an instance attribute instead It served no purpose being as a default argument, but made testing harder since the principle of test isolation could not be met * TST: improve test isolation by removing the DB file in between tests * ENH: change default option for database_create_tables_at_setup from True to False to ensure backwards compatibility the default value may be changed in the future * TST: add test for database_create_tables_at_setup option * ENH: add warning about change of default value of 
database_create_tables_at_setup in celery 5.7 * DOC: update doc for database_create_tables_at_setup to reflect change of default value * Update celery/backends/database/__init__.py * ENH: add default for create_tables_at_setup config option, remove warning if configuration is default, add note to specify what was the behaviour before this change. Having create_tables_at_setup=True by default, although it changes celery's behaviour, only has a small impact on users * DOC: add more context to why we don't create tables at setup for test_table_schema_config --------- Co-authored-by: Asif Saif Uddin Co-authored-by: Tomer Nosrati --- CONTRIBUTORS.txt | 1 + celery/app/defaults.py | 1 + celery/backends/database/__init__.py | 14 +++++++++++++- docs/userguide/configuration.rst | 17 +++++++++++++++++ t/unit/backends/test_database.py | 24 ++++++++++++++++++++++-- 5 files changed, 54 insertions(+), 3 deletions(-) diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index c86f3c1d559..39b73c8a38a 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -301,4 +301,5 @@ Johannes Faigle, 2024/06/18 Giovanni Giampauli, 2024/06/26 Shamil Abdulaev, 2024/08/05 Nikos Atlas, 2024/08/26 +Marc Bresson, 2024/09/02 Narasux, 2024/09/09 diff --git a/celery/app/defaults.py b/celery/app/defaults.py index 34fbe94bcec..04bc1927944 100644 --- a/celery/app/defaults.py +++ b/celery/app/defaults.py @@ -249,6 +249,7 @@ def __repr__(self): ), table_schemas=Option(type='dict'), table_names=Option(type='dict', old={'celery_result_db_tablenames'}), + create_tables_at_setup=Option(True, type='bool'), ), task=Namespace( __old__=OLD_NS, diff --git a/celery/backends/database/__init__.py b/celery/backends/database/__init__.py index 91080adc46a..df03db56d38 100644 --- a/celery/backends/database/__init__.py +++ b/celery/backends/database/__init__.py @@ -98,11 +98,23 @@ def __init__(self, dburi=None, engine_options=None, url=None, **kwargs): 'Missing connection string! Do you have the' ' database_url setting set to a real value?') + self.session_manager = SessionManager() + + create_tables_at_setup = conf.database_create_tables_at_setup + if create_tables_at_setup is True: + self._create_tables() + @property def extended_result(self): return self.app.conf.find_value_for_key('extended', 'result') - def ResultSession(self, session_manager=SessionManager()): + def _create_tables(self): + """Create the task and taskset tables.""" + self.ResultSession() + + def ResultSession(self, session_manager=None): + if session_manager is None: + session_manager = self.session_manager return session_manager.session_factory( dburi=self.url, short_lived_sessions=self.short_lived_sessions, diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst index 23b2974f34a..ab17540ae6b 100644 --- a/docs/userguide/configuration.rst +++ b/docs/userguide/configuration.rst @@ -987,6 +987,23 @@ strings (this is the part of the URI that comes after the ``db+`` prefix). .. _`Connection String`: http://www.sqlalchemy.org/docs/core/engines.html#database-urls +.. setting:: database_create_tables_at_setup + +``database_create_tables_at_setup`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +.. versionadded:: 5.5.0 + +Default: True. + +- If `True`, Celery will create the tables in the database during setup. +- If `False`, Celery will create the tables lazily, i.e. wait for the first task + to be executed before creating the tables. + +.. note:: + Before Celery 5.5, the tables were created lazily, i.e.
it was equivalent to + `database_create_tables_at_setup` set to False. + .. setting:: database_engine_options ``database_engine_options`` diff --git a/t/unit/backends/test_database.py b/t/unit/backends/test_database.py index a693f383f67..328ee0c9c02 100644 --- a/t/unit/backends/test_database.py +++ b/t/unit/backends/test_database.py @@ -1,3 +1,4 @@ +import os from datetime import datetime from pickle import dumps, loads from unittest.mock import Mock, patch @@ -15,6 +16,8 @@ from celery.backends.database.session import PREPARE_MODELS_MAX_RETRIES, ResultModelBase, SessionManager # noqa from t import skip # noqa +DB_PATH = "test.db" + class SomeClass: @@ -45,8 +48,14 @@ def test_context_raises(self): @skip.if_pypy class test_DatabaseBackend: + @pytest.fixture(autouse=True) + def remmove_db(self): + yield + if os.path.exists(DB_PATH): + os.remove(DB_PATH) + def setup_method(self): - self.uri = 'sqlite:///test.db' + self.uri = 'sqlite:///' + DB_PATH self.app.conf.result_serializer = 'pickle' def test_retry_helper(self): @@ -73,6 +82,9 @@ def test_table_schema_config(self): 'task': 'foo', 'group': 'bar', } + # disable table creation because schema foo and bar do not exist + # and aren't created if they don't exist. + self.app.conf.database_create_tables_at_setup = False tb = DatabaseBackend(self.uri, app=self.app) assert tb.task_cls.__table__.schema == 'foo' assert tb.task_cls.__table__.c.id.default.schema == 'foo' @@ -88,6 +100,14 @@ def test_table_name_config(self): assert tb.task_cls.__table__.name == 'foo' assert tb.taskset_cls.__table__.name == 'bar' + def test_table_creation_at_setup_config(self): + from sqlalchemy import inspect + self.app.conf.database_create_tables_at_setup = True + tb = DatabaseBackend(self.uri, app=self.app) + engine = tb.session_manager.get_engine(tb.url) + inspect(engine).has_table("celery_taskmeta") + inspect(engine).has_table("celery_tasksetmeta") + def test_missing_task_id_is_PENDING(self): tb = DatabaseBackend(self.uri, app=self.app) assert tb.get_state('xxx-does-not-exist') == states.PENDING @@ -220,7 +240,7 @@ def test_TaskSet__repr__(self): @skip.if_pypy class test_DatabaseBackend_result_extended(): def setup_method(self): - self.uri = 'sqlite:///test.db' + self.uri = 'sqlite:///' + DB_PATH self.app.conf.result_serializer = 'pickle' self.app.conf.result_extended = True From 2a3cfbaf93dfc5033bfae7aef7891bcb486fcc36 Mon Sep 17 00:00:00 2001 From: pyup-bot Date: Wed, 18 Sep 2024 18:46:58 +0300 Subject: [PATCH 0840/1051] Pin pymongo to latest version 4.9 --- requirements/extras/mongodb.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/mongodb.txt b/requirements/extras/mongodb.txt index 5d7b45c49d9..7526471e96f 100644 --- a/requirements/extras/mongodb.txt +++ b/requirements/extras/mongodb.txt @@ -1 +1 @@ -pymongo>=4.3, <4.9 +pymongo==4.9 From 03c6cc5a7840bbf1579d2b3eae7d64d10759820e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 18 Sep 2024 22:47:14 +0000 Subject: [PATCH 0841/1051] Bump pymongo from 4.9 to 4.9.1 Bumps [pymongo](https://github.com/mongodb/mongo-python-driver) from 4.9 to 4.9.1. 
- [Release notes](https://github.com/mongodb/mongo-python-driver/releases) - [Changelog](https://github.com/mongodb/mongo-python-driver/blob/master/doc/changelog.rst) - [Commits](https://github.com/mongodb/mongo-python-driver/compare/4.9...4.9.1) --- updated-dependencies: - dependency-name: pymongo dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- requirements/extras/mongodb.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/mongodb.txt b/requirements/extras/mongodb.txt index 7526471e96f..ecf3c6f8156 100644 --- a/requirements/extras/mongodb.txt +++ b/requirements/extras/mongodb.txt @@ -1 +1 @@ -pymongo==4.9 +pymongo==4.9.1 From 8afc6333c522a638d287e675c71173ed79285561 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 19 Sep 2024 22:12:44 +0300 Subject: [PATCH 0842/1051] Bump Kombu to v5.4.2 (#9304) --- requirements/default.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/default.txt b/requirements/default.txt index 2ce13715227..88ea0c86436 100644 --- a/requirements/default.txt +++ b/requirements/default.txt @@ -1,5 +1,5 @@ billiard>=4.2.0,<5.0 -kombu>=5.4.0,<6.0 +kombu>=5.4.2,<6.0 vine>=5.1.0,<6.0 click>=8.1.2,<9.0 click-didyoumean>=0.3.0 From eb8344576cbcc4aa6d8cfd907099b0eb8400f9b8 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 19 Sep 2024 22:42:19 +0300 Subject: [PATCH 0843/1051] Use rabbitmq:3 in stamping smoke tests (#9307) --- t/smoke/tests/stamping/conftest.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/t/smoke/tests/stamping/conftest.py b/t/smoke/tests/stamping/conftest.py index fa1e3f49874..dc5b87c9959 100644 --- a/t/smoke/tests/stamping/conftest.py +++ b/t/smoke/tests/stamping/conftest.py @@ -6,6 +6,13 @@ from t.smoke.workers.dev import SmokeWorkerContainer +@pytest.fixture +def default_rabbitmq_broker_image() -> str: + # Celery 4 doesn't support RabbitMQ 4 due to: + # https://github.com/celery/kombu/pull/2098 + return "rabbitmq:3" + + @pytest.fixture def default_worker_tasks(default_worker_tasks: set) -> set: from t.smoke.tests.stamping import tasks as stamping_tasks From 571eb4fc0814bf8e76bda503d5dffdf563fedabc Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Fri, 20 Sep 2024 22:14:39 +0300 Subject: [PATCH 0844/1051] Bump pytest-celery to 1.1.3 (#9308) --- requirements/extras/pytest.txt | 2 +- requirements/test.txt | 2 +- t/smoke/workers/docker/dev | 2 +- t/smoke/workers/docker/pypi | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/extras/pytest.txt b/requirements/extras/pytest.txt index f77db5bccc3..6f0f7a19896 100644 --- a/requirements/extras/pytest.txt +++ b/requirements/extras/pytest.txt @@ -1 +1 @@ -pytest-celery[all]>=1.1.2 +pytest-celery[all]>=1.1.3 diff --git a/requirements/test.txt b/requirements/test.txt index 7719f7877db..cba628a0045 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,5 +1,5 @@ pytest==8.3.3 -pytest-celery[all]>=1.1.2 +pytest-celery[all]>=1.1.3 pytest-rerunfailures==14.0 pytest-subtests==0.13.1 pytest-timeout==2.3.1 diff --git a/t/smoke/workers/docker/dev b/t/smoke/workers/docker/dev index 2a8709b6619..47f3704510d 100644 --- a/t/smoke/workers/docker/dev +++ b/t/smoke/workers/docker/dev @@ -39,7 +39,7 @@ COPY --chown=test_user:test_user . 
/celery RUN pip install --no-cache-dir --upgrade \ pip \ -e /celery[redis,pymemcache,pydantic,sqs] \ - pytest-celery>=1.1.2 + pytest-celery>=1.1.3 # The workdir must be /app WORKDIR /app diff --git a/t/smoke/workers/docker/pypi b/t/smoke/workers/docker/pypi index 981438e0e04..d0b2c21aa48 100644 --- a/t/smoke/workers/docker/pypi +++ b/t/smoke/workers/docker/pypi @@ -38,7 +38,7 @@ EXPOSE 5678 RUN pip install --no-cache-dir --upgrade \ pip \ celery[redis,pymemcache]${CELERY_VERSION:+==$CELERY_VERSION} \ - pytest-celery[sqs]>=1.1.2 \ + pytest-celery[sqs]>=1.1.3 \ pydantic>=2.4 # The workdir must be /app From 13830b18374d249e978ab0f4545569870e734202 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sat, 21 Sep 2024 17:44:59 +0300 Subject: [PATCH 0845/1051] Added Python 3.13 Support (#9309) * Added Python 3.13 to CI (allow-prereleases: true) * Bump billiard to 4.2.1 * Exculde windows from Python 3.13 unit tests * Add Python 3.13 to the supported versions in the docs --- .github/workflows/python-package.yml | 11 ++++++++--- README.rst | 2 +- requirements/default.txt | 2 +- setup.py | 1 + tox.ini | 16 +++++++++------- 5 files changed, 20 insertions(+), 12 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 0bc68e7fb6b..6a5124ee59a 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -33,7 +33,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ['3.8', '3.9', '3.10', '3.11', '3.12', 'pypy-3.10'] + python-version: ['3.8', '3.9', '3.10', '3.11', '3.12', '3.13', 'pypy-3.10'] os: ["blacksmith-4vcpu-ubuntu-2204", "windows-latest"] exclude: - python-version: '3.9' @@ -44,6 +44,8 @@ jobs: os: "windows-latest" - python-version: '3.11' os: "windows-latest" + - python-version: '3.13' + os: "windows-latest" steps: - name: Install apt packages @@ -55,6 +57,7 @@ jobs: uses: useblacksmith/setup-python@v6 with: python-version: ${{ matrix.python-version }} + allow-prereleases: true cache: 'pip' cache-dependency-path: '**/setup.py' @@ -84,7 +87,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + python-version: ['3.8', '3.9', '3.10', '3.11', '3.12', '3.13'] toxenv: ['redis', 'rabbitmq', 'rabbitmq_redis'] services: @@ -113,6 +116,7 @@ jobs: uses: useblacksmith/setup-python@v6 with: python-version: ${{ matrix.python-version }} + allow-prereleases: true cache: 'pip' cache-dependency-path: '**/setup.py' - name: Install tox @@ -132,7 +136,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ['3.8', '3.12'] + python-version: ['3.8', '3.12', '3.13'] test-case: [ 'failover', 'quorum_queues', @@ -163,6 +167,7 @@ jobs: uses: useblacksmith/setup-python@v6 with: python-version: ${{ matrix.python-version }} + allow-prereleases: true cache: 'pip' cache-dependency-path: '**/setup.py' diff --git a/README.rst b/README.rst index 94a78e4fc53..dc2ffe4bd61 100644 --- a/README.rst +++ b/README.rst @@ -60,7 +60,7 @@ What do I need? 
Celery version 5.5.x runs on: -- Python (3.8, 3.9, 3.10, 3.11, 3.12) +- Python (3.8, 3.9, 3.10, 3.11, 3.12, 3.13) - PyPy3.9+ (v7.3.12+) diff --git a/requirements/default.txt b/requirements/default.txt index 88ea0c86436..3711888032d 100644 --- a/requirements/default.txt +++ b/requirements/default.txt @@ -1,4 +1,4 @@ -billiard>=4.2.0,<5.0 +billiard>=4.2.1,<5.0 kombu>=5.4.2,<6.0 vine>=5.1.0,<6.0 click>=8.1.2,<9.0 diff --git a/setup.py b/setup.py index 8cfc1749389..b78932ea597 100755 --- a/setup.py +++ b/setup.py @@ -176,6 +176,7 @@ def long_description(): "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Operating System :: OS Independent" diff --git a/tox.ini b/tox.ini index 54ebced29c9..55f80bd167d 100644 --- a/tox.ini +++ b/tox.ini @@ -2,9 +2,9 @@ requires = tox-gh-actions envlist = - {3.8,3.9,3.10,3.11,3.12,pypy3}-unit - {3.8,3.9,3.10,3.11,3.12,pypy3}-integration-{rabbitmq_redis,rabbitmq,redis,dynamodb,azureblockblob,cache,cassandra,elasticsearch,docker} - {3.8,3.9,3.10,3.11,3.12,pypy3}-smoke + {3.8,3.9,3.10,3.11,3.12,3.13,pypy3}-unit + {3.8,3.9,3.10,3.11,3.12,3.13,pypy3}-integration-{rabbitmq_redis,rabbitmq,redis,dynamodb,azureblockblob,cache,cassandra,elasticsearch,docker} + {3.8,3.9,3.10,3.11,3.12,3.13,pypy3}-smoke flake8 apicheck @@ -19,6 +19,7 @@ python = 3.10: 3.10-unit 3.11: 3.11-unit 3.12: 3.12-unit + 3.13: 3.13-unit pypy-3: pypy3-unit [testenv] @@ -31,8 +32,8 @@ deps= -r{toxinidir}/requirements/test.txt -r{toxinidir}/requirements/pkgutils.txt - 3.8,3.9,3.10,3.11,3.12: -r{toxinidir}/requirements/test-ci-default.txt - 3.8,3.9,3.10,3.11,3.12: -r{toxinidir}/requirements/docs.txt + 3.8,3.9,3.10,3.11,3.12,3.13: -r{toxinidir}/requirements/test-ci-default.txt + 3.8,3.9,3.10,3.11,3.12,3.13: -r{toxinidir}/requirements/docs.txt pypy3: -r{toxinidir}/requirements/test-ci-default.txt integration: -r{toxinidir}/requirements/test-integration.txt @@ -89,9 +90,10 @@ basepython = 3.10: python3.10 3.11: python3.11 3.12: python3.12 + 3.13: python3.13 pypy3: pypy3 - mypy: python3.12 - lint,apicheck,linkcheck,configcheck,bandit: python3.12 + mypy: python3.13 + lint,apicheck,linkcheck,configcheck,bandit: python3.13 usedevelop = True [testenv:mypy] From e6dd621683f5666d5854ec3ed8c9953e10c8a27d Mon Sep 17 00:00:00 2001 From: Omer Katz Date: Tue, 24 Sep 2024 13:45:28 +0300 Subject: [PATCH 0846/1051] Add log when global qos is disabled (#9296) * Add log when global qos is disabled. * Added unit test. --- celery/worker/consumer/tasks.py | 2 ++ t/unit/worker/test_consumer.py | 17 +++++++++++++++++ 2 files changed, 19 insertions(+) diff --git a/celery/worker/consumer/tasks.py b/celery/worker/consumer/tasks.py index eca03e14298..0be966755af 100644 --- a/celery/worker/consumer/tasks.py +++ b/celery/worker/consumer/tasks.py @@ -42,6 +42,8 @@ def start(self, c): c.update_strategies() qos_global = self.qos_global(c) + if qos_global is False: + logger.info("Global QoS is disabled. 
Prefetch count is now static.") # set initial prefetch count c.connection.default_channel.basic_qos( diff --git a/t/unit/worker/test_consumer.py b/t/unit/worker/test_consumer.py index e38e1d952b7..ae677a7bfad 100644 --- a/t/unit/worker/test_consumer.py +++ b/t/unit/worker/test_consumer.py @@ -1,4 +1,5 @@ import errno +import logging import socket from collections import deque from unittest.mock import MagicMock, Mock, call, patch @@ -641,6 +642,22 @@ def test_qos_global_eta_warning(self): with pytest.warns(CeleryWarning, match=ETA_TASKS_NO_GLOBAL_QOS_WARNING % "celery"): tasks.qos_global(c) + def test_log_when_qos_is_false(self, caplog): + c = self.c + c.connection.transport.driver_type = 'amqp' + c.app.amqp.queues = {"celery": Mock(queue_arguments={"x-queue-type": "quorum"})} + tasks = Tasks(c) + + with caplog.at_level(logging.INFO): + tasks.start(c) + + assert len(caplog.records) == 1 + + record = caplog.records[0] + assert record.levelname == "INFO" + assert record.msg == "Global QoS is disabled. Prefetch count is now static." + + class test_Agent: From 6a14b784fcdc117d620394b8164a4feb960bd79e Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 25 Sep 2024 15:43:31 +0300 Subject: [PATCH 0847/1051] Added official release docs (whatsnew) for v5.5 (#9312) --- docs/history/changelog-5.5.rst | 299 ++++++++++++++++++++++++++++++++- docs/history/whatsnew-5.5.rst | 286 ++++++++++++++++++++++++++++++- 2 files changed, 583 insertions(+), 2 deletions(-) diff --git a/docs/history/changelog-5.5.rst b/docs/history/changelog-5.5.rst index dd58c2492ed..2a46ba0a417 100644 --- a/docs/history/changelog-5.5.rst +++ b/docs/history/changelog-5.5.rst @@ -4,4 +4,301 @@ Change history ================ -TBD +This document contains change notes for bugfixes & new features +in the main branch & 5.5.x series; please see :ref:`whatsnew-5.5` for +an overview of what's new in Celery 5.5. + +.. _version-5.5.0b3: + +5.5.0b3 +======= + +:release-date: 2024-09-08 +:release-by: Tomer Nosrati + +Celery v5.5.0 Beta 3 is now available for testing. +Please help us test this version and report any issues. + +Key Highlights +~~~~~~~~~~~~~~ + +Soft Shutdown +------------- + +The soft shutdown is a new mechanism in Celery that sits between the warm shutdown and the cold shutdown. +It sets a time-limited "warm shutdown" period, during which the worker will continue to process tasks that are already running. +After the soft shutdown ends, the worker will initiate a graceful cold shutdown, stopping all tasks and exiting. + +The soft shutdown is disabled by default, and can be enabled by setting the new configuration option :setting:`worker_soft_shutdown_timeout`. +If a worker is not running any task when the soft shutdown initiates, it will skip the warm shutdown period and proceed directly to the cold shutdown +unless the new configuration option :setting:`worker_enable_soft_shutdown_on_idle` is set to True. This is useful for workers +that are idle, waiting on ETA tasks to be executed that still want to enable the soft shutdown anyway. + +The soft shutdown can replace the cold shutdown when using a broker with a visibility timeout mechanism, like :ref:`Redis ` +or :ref:`SQS `, to enable a more graceful cold shutdown procedure, allowing the worker enough time to re-queue tasks that were not +completed (e.g., ``Restoring 1 unacknowledged message(s)``) by resetting the visibility timeout of the unacknowledged messages just before +the worker exits completely.
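For illustration only, here is a minimal sketch of opting into the soft shutdown via the two new settings; the app name and the timeout value are assumptions for the example, not recommendations from this release:

.. code-block:: python

    from celery import Celery

    app = Celery("proj")  # hypothetical app name

    # Length (in seconds) of the time-limited "warm shutdown" period that
    # runs before the worker falls back to a cold shutdown.
    app.conf.worker_soft_shutdown_timeout = 10.0

    # Assumed opt-in: also honor the soft shutdown when the worker is idle,
    # e.g. while it is waiting on ETA tasks.
    app.conf.worker_enable_soft_shutdown_on_idle = True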
+ +After upgrading to this version, please share your feedback on the new Soft Shutdown mechanism. + +Relevant Issues: +`#9213 `_, +`#9231 `_, +`#9238 `_ + +- New :ref:`documentation ` for each shutdown type. +- New :setting:`worker_soft_shutdown_timeout` configuration option. +- New :setting:`worker_enable_soft_shutdown_on_idle` configuration option. + +REMAP_SIGTERM +------------- + +The ``REMAP_SIGTERM`` "hidden feature" has been tested, :ref:`documented ` and is now officially supported. +This feature allows users to remap the SIGTERM signal to SIGQUIT, to initiate a soft or a cold shutdown using :sig:`TERM` +instead of :sig:`QUIT`. + +Previous Pre-release Highlights +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Pydantic Support +---------------- + +This release introduces support for Pydantic models in Celery tasks. +For more info, see the new pydantic example and PR `#9023 `_ by @mathiasertl. + +After upgrading to this version, please share your feedback on the new Pydantic support. + +Redis Broker Stability Improvements +----------------------------------- +The root cause of the Redis broker instability issue has been `identified and resolved `_ +in the v5.4.0 release of Kombu, which should resolve the disconnections bug and offer additional improvements. + +After upgrading to this version, please share your feedback on the Redis broker stability. + +Relevant Issues: +`#7276 `_, +`#8091 `_, +`#8030 `_, +`#8384 `_ + +Quorum Queues Initial Support +----------------------------- +This release introduces the initial support for Quorum Queues with Celery. + +See new configuration options for more details: + +- :setting:`task_default_queue_type` +- :setting:`worker_detect_quorum_queues` + +After upgrading to this version, please share your feedback on the Quorum Queues support. + +Relevant Issues: +`#6067 `_, +`#9121 `_ + +What's Changed +~~~~~~~~~~~~~~ + +- Added SQS (localstack) broker to canvas smoke tests (#9179) +- Pin elastic-transport to <= latest version 8.15.0 (#9182) +- Update elasticsearch requirement from <=8.14.0 to <=8.15.0 (#9186) +- Improve formatting (#9188) +- Add basic helm chart for celery (#9181) +- Update kafka.rst (#9194) +- Update pytest-order to 1.3.0 (#9198) +- Update mypy to 1.11.2 (#9206) +- All added to routes (#9204) +- Fix typos discovered by codespell (#9212) +- Use tzdata extras with zoneinfo backports (#8286) +- Use `docker compose` in Contributing's doc build section (#9219) +- Failing test for issue #9119 (#9215) +- Fix date_done timezone issue (#8385) +- CI Fixes to smoke tests (#9223) +- Fix: passes current request context when pushing to request_stack (#9208) +- Fix broken link in the Using RabbitMQ docs page (#9226) +- Added Soft Shutdown Mechanism (#9213) +- Added worker_enable_soft_shutdown_on_idle (#9231) +- Bump cryptography from 43.0.0 to 43.0.1 (#9233) +- Added docs regarding the relevancy of soft shutdown and ETA tasks (#9238) +- Show broker_connection_retry_on_startup warning only if it evaluates as False (#9227) +- Fixed docker-docs CI failure (#9240) +- Added docker cleanup auto-fixture to improve smoke tests stability (#9243) +- print is not thread-safe, so should not be used in signal handler (#9222) +- Prepare for (pre) release: v5.5.0b3 (#9244) + +.. _version-5.5.0b2: + +5.5.0b2 +======= + +:release-date: 2024-08-06 +:release-by: Tomer Nosrati + +Celery v5.5.0 Beta 2 is now available for testing. +Please help us test this version and report any issues. 
+ +Key Highlights +~~~~~~~~~~~~~~ + +Pydantic Support +---------------- + +This release introduces support for Pydantic models in Celery tasks. +For more info, see the new pydantic example and PR `#9023 `_ by @mathiasertl. + +After upgrading to this version, please share your feedback on the new Pydantic support. + +Previous Beta Highlights +~~~~~~~~~~~~~~~~~~~~~~~~ + +Redis Broker Stability Improvements +----------------------------------- +The root cause of the Redis broker instability issue has been `identified and resolved `_ +in the v5.4.0 release of Kombu, which should resolve the disconnections bug and offer additional improvements. + +After upgrading to this version, please share your feedback on the Redis broker stability. + +Relevant Issues: +`#7276 `_, +`#8091 `_, +`#8030 `_, +`#8384 `_ + +Quorum Queues Initial Support +----------------------------- +This release introduces the initial support for Quorum Queues with Celery. + +See new configuration options for more details: + +- :setting:`task_default_queue_type` +- :setting:`worker_detect_quorum_queues` + +After upgrading to this version, please share your feedback on the Quorum Queues support. + +Relevant Issues: +`#6067 `_, +`#9121 `_ + +What's Changed +~~~~~~~~~~~~~~ + +- Bump pytest from 8.3.1 to 8.3.2 (#9153) +- Remove setuptools deprecated test command from setup.py (#9159) +- Pin pre-commit to latest version 3.8.0 from Python 3.9 (#9156) +- Bump mypy from 1.11.0 to 1.11.1 (#9164) +- Change "docker-compose" to "docker compose" in Makefile (#9169) +- update python versions and docker compose (#9171) +- Add support for Pydantic model validation/serialization (fixes #8751) (#9023) +- Allow local dynamodb to be installed on another host than localhost (#8965) +- Terminate job implementation for gevent concurrency backend (#9083) +- Bump Kombu to v5.4.0 (#9177) +- Add check for soft_time_limit and time_limit values (#9173) +- Prepare for (pre) release: v5.5.0b2 (#9178) + +.. _version-5.5.0b1: + +5.5.0b1 +======= + +:release-date: 2024-07-24 +:release-by: Tomer Nosrati + +Celery v5.5.0 Beta 1 is now available for testing. +Please help us test this version and report any issues. + +Key Highlights +~~~~~~~~~~~~~~ + +Redis Broker Stability Improvements +----------------------------------- +The root cause of the Redis broker instability issue has been `identified and resolved `_ +in the release-candidate for Kombu v5.4.0. This beta release has been upgraded to use the new +Kombu RC version, which should resolve the disconnections bug and offer additional improvements. + +After upgrading to this version, please share your feedback on the Redis broker stability. + +Relevant Issues: +`#7276 `_, +`#8091 `_, +`#8030 `_, +`#8384 `_ + +Quorum Queues Initial Support +----------------------------- +This release introduces the initial support for Quorum Queues with Celery. + +See new configuration options for more details: + +- :setting:`task_default_queue_type` +- :setting:`worker_detect_quorum_queues` + +After upgrading to this version, please share your feedback on the Quorum Queues support. 
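As a hedged configuration sketch of the two new options named above (the app name and broker URL are assumptions; quorum queues are a RabbitMQ feature):

.. code-block:: python

    from celery import Celery

    app = Celery("proj", broker="amqp://localhost")  # assumed RabbitMQ broker

    # Declare queues created by Celery as quorum queues.
    app.conf.task_default_queue_type = "quorum"

    # Let the worker detect quorum queues and adapt,
    # e.g. by disabling global QoS when they are found.
    app.conf.worker_detect_quorum_queues = True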
+ +Relevant Issues: +`#6067 `_, +`#9121 `_ + +What's Changed +~~~~~~~~~~~~~~ + +- (docs): use correct version celery v.5.4.x (#8975) +- Update mypy to 1.10.0 (#8977) +- Limit pymongo<4.7 when Python <= 3.10 due to breaking changes in 4.7 (#8988) +- Bump pytest from 8.1.1 to 8.2.0 (#8987) +- Update README to Include FastAPI in Framework Integration Section (#8978) +- Clarify return values of ..._on_commit methods (#8984) +- add kafka broker docs (#8935) +- Limit pymongo<4.7 regardless of Python version (#8999) +- Update pymongo[srv] requirement from <4.7,>=4.0.2 to >=4.0.2,<4.8 (#9000) +- Update elasticsearch requirement from <=8.13.0 to <=8.13.1 (#9004) +- security: SecureSerializer: support generic low-level serializers (#8982) +- don't kill if pid same as file (#8997) (#8998) +- Update cryptography to 42.0.6 (#9005) +- Bump cryptography from 42.0.6 to 42.0.7 (#9009) +- Added -vv to unit, integration and smoke tests (#9014) +- SecuritySerializer: ensure pack separator will not be conflicted with serialized fields (#9010) +- Update sphinx-click to 5.2.2 (#9025) +- Bump sphinx-click from 5.2.2 to 6.0.0 (#9029) +- Fix a typo to display the help message in first-steps-with-django (#9036) +- Pinned requests to v2.31.0 due to docker-py bug #3256 (#9039) +- Fix certificate validity check (#9037) +- Revert "Pinned requests to v2.31.0 due to docker-py bug #3256" (#9043) +- Bump pytest from 8.2.0 to 8.2.1 (#9035) +- Update elasticsearch requirement from <=8.13.1 to <=8.13.2 (#9045) +- Fix detection of custom task set as class attribute with Django (#9038) +- Update elastic-transport requirement from <=8.13.0 to <=8.13.1 (#9050) +- Bump pycouchdb from 1.14.2 to 1.16.0 (#9052) +- Update pytest to 8.2.2 (#9060) +- Bump cryptography from 42.0.7 to 42.0.8 (#9061) +- Update elasticsearch requirement from <=8.13.2 to <=8.14.0 (#9069) +- [enhance feature] Crontab schedule: allow using month names (#9068) +- Enhance tox environment: [testenv:clean] (#9072) +- Clarify docs about Reserve one task at a time (#9073) +- GCS docs fixes (#9075) +- Use hub.remove_writer instead of hub.remove for write fds (#4185) (#9055) +- Class method to process crontab string (#9079) +- Fixed smoke tests env bug when using integration tasks that rely on Redis (#9090) +- Bugfix - a task will run multiple times when chaining chains with groups (#9021) +- Bump mypy from 1.10.0 to 1.10.1 (#9096) +- Don't add a separator to global_keyprefix if it already has one (#9080) +- Update pymongo[srv] requirement from <4.8,>=4.0.2 to >=4.0.2,<4.9 (#9111) +- Added missing import in examples for Django (#9099) +- Bump Kombu to v5.4.0rc1 (#9117) +- Removed skipping Redis in t/smoke/tests/test_consumer.py tests (#9118) +- Update pytest-subtests to 0.13.0 (#9120) +- Increased smoke tests CI timeout (#9122) +- Bump Kombu to v5.4.0rc2 (#9127) +- Update zstandard to 0.23.0 (#9129) +- Update pytest-subtests to 0.13.1 (#9130) +- Changed retry to tenacity in smoke tests (#9133) +- Bump mypy from 1.10.1 to 1.11.0 (#9135) +- Update cryptography to 43.0.0 (#9138) +- Update pytest to 8.3.1 (#9137) +- Added support for Quorum Queues (#9121) +- Bump Kombu to v5.4.0rc3 (#9139) +- Cleanup in Changelog.rst (#9141) +- Update Django docs for CELERY_CACHE_BACKEND (#9143) +- Added missing docs to previous releases (#9144) +- Fixed a few documentation build warnings (#9145) +- docs(README): link invalid (#9148) +- Prepare for (pre) release: v5.5.0b1 (#9146) diff --git a/docs/history/whatsnew-5.5.rst b/docs/history/whatsnew-5.5.rst index 09e6aabb0ae..b9ea8689619 
100644 --- a/docs/history/whatsnew-5.5.rst +++ b/docs/history/whatsnew-5.5.rst @@ -12,4 +12,288 @@ releases (0.0.x), while older series are archived under the :ref:`history` section. -TBD +Celery is a simple, flexible, and reliable distributed programming framework +to process vast amounts of messages, while providing operations with +the tools required to maintain a distributed system with Python. + +It's a task queue with a focus on real-time processing, while also +supporting task scheduling. + +Celery has a large and diverse community of users and contributors; +you should come join us :ref:`on IRC ` +or :ref:`our mailing-list `. + +.. note:: + + Following the problems with Freenode, we migrated our IRC channel to Libera Chat + as most projects did. + You can also join us using `Gitter `_. + + We're sometimes there to answer questions. We welcome you to join. + +To read more about Celery you should go read the :ref:`introduction `. + +While this version is **mostly** backward compatible with previous versions, +it's important that you read the following section as this release +is a new major version. + +This version is officially supported on CPython 3.8, 3.9, 3.10, 3.11, 3.12 and 3.13, +and is also supported on PyPy3.10+. + +.. _`website`: https://celery.readthedocs.io + +.. topic:: Table of Contents + + Make sure you read the important notes before upgrading to this version. + +.. contents:: + :local: + :depth: 3 + +Preface +======= + +.. note:: + + **This release contains fixes for many long-standing bugs & stability issues. + We encourage our users to upgrade to this release as soon as possible.** + +The 5.5.0 release is a new feature release for Celery. + +Releases in the 5.x series are codenamed after songs of `Jon Hopkins `_. +This release has been codenamed `Immunity `_. + +From now on we only support Python 3.8 and above. +We will maintain compatibility with Python 3.8 until its +EOL in 2024. + +*— Tomer Nosrati* + +Long Term Support Policy +------------------------ + +We no longer support Celery 4.x as we don't have the resources to do so. +If you'd like to help us, all contributions are welcome. + +Celery 5.x **is not** an LTS release. We will support it until the release +of Celery 6.x. + +We're in the process of defining our Long Term Support policy. +Watch the next "What's New" document for updates. + +Upgrading from Celery 4.x +========================= + +Step 1: Adjust your command line invocation +------------------------------------------- + +Celery 5.0 introduces a new CLI implementation which isn't completely backwards compatible. + +The global options can no longer be positioned after the sub-command. +Instead, they must be positioned as an option for the `celery` command like so:: + + celery --app path.to.app worker + +If you were using our :ref:`daemonizing` guide to deploy Celery in production, +you should revisit it for updates. + +Step 2: Update your configuration with the new setting names +------------------------------------------------------------ + +If you haven't already updated your configuration when you migrated to Celery 4.0, +please do so now. + +We elected to extend the deprecation period until 6.0 since +we did not loudly warn about using these deprecated settings. + +Please refer to the :ref:`migration guide ` for instructions. + +Step 3: Read the important notes in this document +------------------------------------------------- + +Make sure you are not affected by any of the important upgrade notes +mentioned in the :ref:`following section `.
+ +You should verify that none of the breaking changes in the CLI +affect you. Please refer to :ref:`New Command Line Interface ` for details. + +Step 4: Migrate your code to Python 3 +------------------------------------- + +Celery 5.x only supports Python 3. Therefore, you must ensure your code is +compatible with Python 3. + +If you haven't ported your code to Python 3, you must do so before upgrading. + +You can use tools like `2to3 `_ +and `pyupgrade `_ to assist you with +this effort. + +After the migration is done, run your test suite with Celery 5 to ensure +nothing has been broken. + +Step 5: Upgrade to Celery 5.5 +----------------------------- + +At this point you can upgrade your workers and clients with the new version. + +.. _v550-important: + +Important Notes +=============== + +Supported Python Versions +------------------------- + +The supported Python versions are: + +- CPython 3.8 +- CPython 3.9 +- CPython 3.10 +- CPython 3.11 +- CPython 3.12 +- CPython 3.13 +- PyPy3.10 (``pypy3``) + +Python 3.8 Support +------------------ + +Python 3.8 will reach EOL in October 2024. + +Celery v5.5 will be the last version to support Python 3.8. + +Minimum Dependencies +-------------------- + +Kombu +~~~~~ + +Starting from Celery v5.5, the minimum required version is Kombu 5.4. + +Redis +~~~~~ + +redis-py 4.5.2 is the new minimum required version. + + +SQLAlchemy +~~~~~~~~~~ + +SQLAlchemy 1.4.x & 2.0.x are now supported in Celery v5.5. + +Billiard +~~~~~~~~ + +Minimum required version is now 4.2.1. + +Django +~~~~~~ + +Minimum Django version is bumped to v2.2.28. +Also added the --skip-checks flag to bypass Django core checks. + +.. _v550-news: + +News +==== + +Redis Broker Stability Improvements +----------------------------------- + +The root cause of the Redis broker instability issue has been `identified and resolved `_ +in the v5.4.0 release of Kombu, which should resolve the disconnections bug and offer +additional improvements. + +Soft Shutdown +------------- + +The soft shutdown is a new mechanism in Celery that sits between the warm shutdown and the cold shutdown. +It sets a time-limited "warm shutdown" period, during which the worker will continue to process tasks that +are already running. After the soft shutdown ends, the worker will initiate a graceful cold shutdown, +stopping all tasks and exiting. + +The soft shutdown is disabled by default, and can be enabled by setting the new configuration option +:setting:`worker_soft_shutdown_timeout`. If a worker is not running any task when the soft shutdown initiates, +it will skip the warm shutdown period and proceed directly to the cold shutdown unless the new configuration option +:setting:`worker_enable_soft_shutdown_on_idle` is set to ``True``. This is useful for workers that are idle, +waiting on ETA tasks to be executed that still want to enable the soft shutdown anyway. + +The soft shutdown can replace the cold shutdown when using a broker with a visibility timeout mechanism, +like :ref:`Redis ` or :ref:`SQS `, to enable a more graceful cold shutdown procedure, +allowing the worker enough time to re-queue tasks that were not completed (e.g., ``Restoring 1 unacknowledged message(s)``) +by resetting the visibility timeout of the unacknowledged messages just before the worker exits completely. + +Pydantic Support +---------------- + +This release introduces support for Pydantic models in Celery tasks by @mathiasertl: + +..
code-block:: bash + + pip install "celery[pydantic]" + +You can use `Pydantic `_ to validate and convert arguments as well as serializing +results based on typehints by passing ``pydantic=True``. For example: + +.. code-block:: python + + from pydantic import BaseModel + + class ArgModel(BaseModel): + value: int + + class ReturnModel(BaseModel): + value: str + + @app.task(pydantic=True) + def x(arg: ArgModel) -> ReturnModel: + # args/kwargs type hinted as Pydantic model will be converted + assert isinstance(arg, ArgModel) + + # The returned model will be converted to a dict automatically + return ReturnModel(value=f"example: {arg.value}") + +The task can then be called using a dict matching the model, and you'll receive +the returned model "dumped" (serialized using ``BaseModel.model_dump()``): + +.. code-block:: python + + >>> result = x.delay({'value': 1}) + >>> result.get(timeout=1) + {'value': 'example: 1'} + +There are a few more options influencing Pydantic behavior: + +.. attribute:: Task.pydantic_strict + + By default, `strict mode `_ + is enabled. You can pass ``False`` to disable strict model validation. + +.. attribute:: Task.pydantic_context + + Pass `additional validation context + `_ during + Pydantic model validation. The context already includes the application object as + ``celery_app`` and the task name as ``celery_task_name`` by default. + +.. attribute:: Task.pydantic_dump_kwargs + + When serializing a result, pass these additional arguments to ``dump_kwargs()``. + By default, only ``mode='json'`` is passed. + +Quorum Queues Initial Support +----------------------------- + +This release introduces the initial support for Quorum Queues with Celery. + +See new configuration options for more details: + +- :setting:`task_default_queue_type` +- :setting:`worker_detect_quorum_queues` + +REMAP_SIGTERM +------------- + +The REMAP_SIGTERM "hidden feature" has been tested, :ref:`documented ` and is now officially supported. +This feature allows users to remap the SIGTERM signal to SIGQUIT, to initiate a soft or a cold shutdown using TERM +instead of QUIT. 
\ No newline at end of file From ad882dc991e46b46c1a195771a897de7127f1e6b Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 26 Sep 2024 01:17:40 +0300 Subject: [PATCH 0848/1051] Enable Codespell autofix (#9313) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b33e778a75c..f9c7f99be07 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -20,7 +20,7 @@ repos: rev: v2.3.0 hooks: - id: codespell # See pyproject.toml for args - args: [--toml, pyproject.toml] + args: [--toml, pyproject.toml, --write-changes] additional_dependencies: - tomli From 6c2a779fc09133e76efacb0e87a6b1f3f908fa4d Mon Sep 17 00:00:00 2001 From: Mathias Ertl Date: Sat, 28 Sep 2024 12:04:59 +0200 Subject: [PATCH 0849/1051] Pydantic typehints: Fix optional, allow generics (#9319) * add tests for optional args and generic args * fix TypeError for generic classes when pydantic=True (fixes #9316) * add annotation helper functions * use new functions from annotation utilities * fix last edge cases * update docs * mark next() as no branch and add comment as explanation * augment typehints --- celery/app/base.py | 18 +++++- celery/utils/annotations.py | 49 ++++++++++++++++ docs/userguide/tasks.rst | 58 ++++++++++++++++++- t/unit/app/test_app.py | 91 ++++++++++++++++++++++++++++++ t/unit/utils/test_annotations.py | 96 ++++++++++++++++++++++++++++++++ 5 files changed, 308 insertions(+), 4 deletions(-) create mode 100644 celery/utils/annotations.py create mode 100644 t/unit/utils/test_annotations.py diff --git a/celery/app/base.py b/celery/app/base.py index c1bb9b790b5..833818344de 100644 --- a/celery/app/base.py +++ b/celery/app/base.py @@ -37,6 +37,7 @@ from celery.utils.objects import FallbackContext, mro_lookup from celery.utils.time import maybe_make_aware, timezone, to_utc +from ..utils.annotations import annotation_is_class, annotation_issubclass, get_optional_arg # Load all builtin tasks from . 
import backends, builtins # noqa from .annotations import prepare as prepare_annotations @@ -129,7 +130,12 @@ def wrapper(*task_args, **task_kwargs): bound_args = task_signature.bind(*task_args, **task_kwargs) for arg_name, arg_value in bound_args.arguments.items(): arg_annotation = task_signature.parameters[arg_name].annotation - if issubclass(arg_annotation, BaseModel): + + optional_arg = get_optional_arg(arg_annotation) + if optional_arg is not None and arg_value is not None: + arg_annotation = optional_arg + + if annotation_issubclass(arg_annotation, BaseModel): bound_args.arguments[arg_name] = arg_annotation.model_validate( arg_value, strict=strict, @@ -141,9 +147,15 @@ def wrapper(*task_args, **task_kwargs): # Dump Pydantic model if the returned value is an instance of pydantic.BaseModel *and* its # class matches the typehint + return_annotation = task_signature.return_annotation + optional_return_annotation = get_optional_arg(return_annotation) + if optional_return_annotation is not None: + return_annotation = optional_return_annotation + if ( - isinstance(returned_value, BaseModel) - and isinstance(returned_value, task_signature.return_annotation) + annotation_is_class(return_annotation) + and isinstance(returned_value, BaseModel) + and isinstance(returned_value, return_annotation) ): return returned_value.model_dump(**dump_kwargs) diff --git a/celery/utils/annotations.py b/celery/utils/annotations.py new file mode 100644 index 00000000000..38a549c000a --- /dev/null +++ b/celery/utils/annotations.py @@ -0,0 +1,49 @@ +"""Code related to handling annotations.""" + +import sys +import types +import typing +from inspect import isclass + + +def is_none_type(value: typing.Any) -> bool: + """Check if the given value is a NoneType.""" + if sys.version_info < (3, 10): + # raise Exception('below 3.10', value, type(None)) + return value is type(None) + return value == types.NoneType # type: ignore[no-any-return] + + +def get_optional_arg(annotation: typing.Any) -> typing.Any: + """Get the argument from an Optional[...] annotation, or None if it is no such annotation.""" + origin = typing.get_origin(annotation) + if origin != typing.Union and (sys.version_info >= (3, 10) and origin != types.UnionType): + return None + + union_args = typing.get_args(annotation) + if len(union_args) != 2: # Union does _not_ have two members, so it's not an Optional + return None + + has_none_arg = any(is_none_type(arg) for arg in union_args) + # There will always be at least one type arg, as we have already established that this is a Union with exactly + # two members, and both cannot be None (`Union[None, None]` does not work). + type_arg = next(arg for arg in union_args if not is_none_type(arg)) # pragma: no branch + + if has_none_arg: + return type_arg + return None + + +def annotation_is_class(annotation: typing.Any) -> bool: + """Test if a given annotation is a class that can be used in isinstance()/issubclass().""" + # isclass() returns True for generic type hints (e.g. `list[str]`) until Python 3.10. + # NOTE: The guard for Python 3.9 is because types.GenericAlias is only added in Python 3.9. This is not a problem + # as the syntax is added in the same version in the first place. 
+ if (3, 9) <= sys.version_info < (3, 11) and isinstance(annotation, types.GenericAlias): + return False + return isclass(annotation) + + +def annotation_issubclass(annotation: typing.Any, cls: type) -> bool: + """Test if a given annotation is of the given subclass.""" + return annotation_is_class(annotation) and issubclass(annotation, cls) diff --git a/docs/userguide/tasks.rst b/docs/userguide/tasks.rst index 88d1b8022ed..0bbfe4c56b3 100644 --- a/docs/userguide/tasks.rst +++ b/docs/userguide/tasks.rst @@ -803,7 +803,14 @@ Argument validation with Pydantic .. versionadded:: 5.5.0 You can use Pydantic_ to validate and convert arguments as well as serializing -results based on typehints by passing ``pydantic=True``. For example: +results based on typehints by passing ``pydantic=True``. + +.. NOTE:: + + Argument validation only covers arguments/return values on the task side. You still have to + serialize arguments yourself when invoking a task with ``delay()`` or ``apply_async()``. + +For example: .. code-block:: python @@ -832,6 +839,55 @@ the returned model "dumped" (serialized using ``BaseModel.model_dump()``): >>> result.get(timeout=1) {'value': 'example: 1'} +Union types, arguments to generics +---------------------------------- + +Union types (e.g. ``Union[SomeModel, OtherModel]``) or arguments to generics (e.g. +``list[SomeModel]``) are **not** supported. + +In case you want to support a list or similar types, it is recommended to use +``pydantic.RootModel``. + + +Optional parameters/return values +--------------------------------- + +Optional parameters or return values are also handled properly. For example, given this task: + +.. code-block:: python + + from typing import Optional + + # models are the same as above + + @app.task(pydantic=True) + def x(arg: Optional[ArgModel] = None) -> Optional[ReturnModel]: + if arg is None: + return None + return ReturnModel(value=f"example: {arg.value}") + +You'll get the following behavior: + +.. code-block:: python + + >>> result = x.delay() + >>> result.get(timeout=1) is None + True + >>> result = x.delay({'value': 1}) + >>> result.get(timeout=1) + {'value': 'example: 1'} + +Return value handling +--------------------- + +Return values will only be serialized if the returned model matches the annotation. If you pass a +model instance of a different type, it will *not* be serialized. ``mypy`` should already catch such +errors and you should then fix your typehints. + + +Pydantic parameters +------------------- + +There are a few more options influencing Pydantic behavior: + +..
+
+
+Optional parameters/return values
+---------------------------------
+
+Optional parameters or return values are also handled properly. For example, given this task:
+
+.. code-block:: python
+
+    from typing import Optional
+
+    # models are the same as above
+
+    @app.task(pydantic=True)
+    def x(arg: Optional[ArgModel] = None) -> Optional[ReturnModel]:
+        if arg is None:
+            return None
+        return ReturnModel(value=f"example: {arg.value}")
+
+You'll get the following behavior:
+
+.. code-block:: python
+
+    >>> result = x.delay()
+    >>> result.get(timeout=1) is None
+    True
+    >>> result = x.delay({'value': 1})
+    >>> result.get(timeout=1)
+    {'value': 'example: 1'}
+
+Return value handling
+---------------------
+
+Return values will only be serialized if the returned model matches the annotation. If you return a
+model instance of a different type, it will *not* be serialized. ``mypy`` should already catch such
+errors; fix your type hints accordingly.
+
+
+Pydantic parameters
+-------------------
+
 There are a few more options influencing Pydantic behavior:
 
 .. attribute:: Task.pydantic_strict
diff --git a/t/unit/app/test_app.py b/t/unit/app/test_app.py
index 1ca508d89b3..4bf1887b236 100644
--- a/t/unit/app/test_app.py
+++ b/t/unit/app/test_app.py
@@ -10,6 +10,7 @@
 from datetime import datetime, timedelta
 from datetime import timezone as datetime_timezone
 from pickle import dumps, loads
+from typing import Optional
 from unittest.mock import DEFAULT, Mock, patch
 
 import pytest
@@ -533,6 +534,52 @@ def foo(arg: int, kwarg: bool = True) -> int:
             assert foo(0) == 1
             check.assert_called_once_with(0, kwarg=True)
 
+    def test_task_with_pydantic_with_optional_args(self):
+        """Test pydantic task receiving and returning an optional argument."""
+        with self.Celery() as app:
+            check = Mock()
+
+            @app.task(pydantic=True)
+            def foo(arg: Optional[int], kwarg: Optional[bool] = True) -> Optional[int]:
+                check(arg, kwarg=kwarg)
+                if isinstance(arg, int):
+                    return 1
+                return 2
+
+            assert foo(0) == 1
+            check.assert_called_once_with(0, kwarg=True)
+
+            assert foo(None) == 2
+            check.assert_called_with(None, kwarg=True)
+
+    @pytest.mark.skipif(sys.version_info < (3, 9), reason="Notation is only supported in Python 3.9 or newer.")
+    def test_task_with_pydantic_with_dict_args(self):
+        """Test pydantic task receiving and returning a generic dict argument."""
+        with self.Celery() as app:
+            check = Mock()
+
+            @app.task(pydantic=True)
+            def foo(arg: dict[str, str], kwarg: dict[str, str]) -> dict[str, str]:
+                check(arg, kwarg=kwarg)
+                return {'x': 'y'}
+
+            assert foo({'a': 'b'}, kwarg={'c': 'd'}) == {'x': 'y'}
+            check.assert_called_once_with({'a': 'b'}, kwarg={'c': 'd'})
+
+    @pytest.mark.skipif(sys.version_info < (3, 9), reason="Notation is only supported in Python 3.9 or newer.")
+    def test_task_with_pydantic_with_list_args(self):
+        """Test pydantic task receiving and returning a generic list argument."""
+        with self.Celery() as app:
+            check = Mock()
+
+            @app.task(pydantic=True)
+            def foo(arg: list[str], kwarg: list[str]) -> list[str]:
+                check(arg, kwarg=kwarg)
+                return ['x']
+
+            assert foo(['a'], kwarg=['b']) == ['x']
+            check.assert_called_once_with(['a'], kwarg=['b'])
+
     def test_task_with_pydantic_with_pydantic_arg_and_default_kwarg(self):
         """Test a pydantic task with pydantic arg/kwarg and return value."""
@@ -568,6 +615,50 @@ def foo(arg: ArgModel, kwarg: KwargModel = kwarg_default) -> ReturnModel:
         assert foo(arg={'arg_value': 5}, kwarg={'kwarg_value': 6}) == {'ret_value': 2}
         check.assert_called_once_with(ArgModel(arg_value=5), kwarg=KwargModel(kwarg_value=6))
 
+    def test_task_with_pydantic_with_optional_pydantic_args(self):
+        """Test pydantic task receiving and returning optional pydantic arguments."""
+        class ArgModel(BaseModel):
+            arg_value: int
+
+        class KwargModel(BaseModel):
+            kwarg_value: int
+
+        class ReturnModel(BaseModel):
+            ret_value: int
+
+        with self.Celery() as app:
+            check = Mock()
+
+            @app.task(pydantic=True)
+            def foo(arg: Optional[ArgModel], kwarg: Optional[KwargModel] = None) -> Optional[ReturnModel]:
+                check(arg, kwarg=kwarg)
+                if isinstance(arg, ArgModel):
+                    return ReturnModel(ret_value=1)
+                return None
+
+            assert foo(None) is None
+            check.assert_called_once_with(None, kwarg=None)
+
+            assert foo({'arg_value': 1}, kwarg={'kwarg_value': 2}) == {'ret_value': 1}
+            check.assert_called_with(ArgModel(arg_value=1), kwarg=KwargModel(kwarg_value=2))
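+
+    # Background for the next test (a short sketch of the mechanism): for a generic
+    # return annotation such as ``dict[str, str]``, ``annotation_is_class()`` returns
+    # False, so the pydantic wrapper leaves the returned model untouched instead of
+    # calling ``model_dump()`` on it.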
+
+    @pytest.mark.skipif(sys.version_info < (3, 9), reason="Notation is only supported in Python 3.9 or newer.")
+    def test_task_with_pydantic_with_generic_return_value(self):
+        """Test that a return value not matching the generic return annotation is not serialized."""
+        class ReturnModel(BaseModel):
+            ret_value: int
+
+        with self.Celery() as app:
+            check = Mock()
+
+            @app.task(pydantic=True)
+            def foo() -> dict[str, str]:
+                check()
+                return ReturnModel(ret_value=1)  # type: ignore  # whole point here is that this doesn't match
+
+            assert foo() == ReturnModel(ret_value=1)
+            check.assert_called_once_with()
+
     def test_task_with_pydantic_with_task_name_in_context(self):
         """Test that the task name is passed as additional context."""
diff --git a/t/unit/utils/test_annotations.py b/t/unit/utils/test_annotations.py
new file mode 100644
index 00000000000..9c8bb6036ad
--- /dev/null
+++ b/t/unit/utils/test_annotations.py
@@ -0,0 +1,96 @@
+import inspect
+import sys
+import typing
+
+import pytest
+from pydantic import BaseModel
+
+from celery.utils.annotations import annotation_issubclass, get_optional_arg, is_none_type
+
+
+@pytest.mark.parametrize(
+    'value,expected',
+    ((3, False), ('x', False), (int, False), (type(None), True)),
+)
+def test_is_none_type(value: typing.Any, expected: bool) -> None:
+    assert is_none_type(value) is expected
+
+
+def test_is_none_type_with_optional_annotations() -> None:
+    annotation = typing.Optional[int]
+    int_type, none_type = typing.get_args(annotation)
+    assert int_type == int  # just to make sure that order is correct
+    assert is_none_type(int_type) is False
+    assert is_none_type(none_type) is True
+
+
+def test_get_optional_arg() -> None:
+    def func(
+        arg: int,
+        optional: typing.Optional[int],
+        optional2: typing.Union[int, None],
+        optional3: typing.Union[None, int],
+        not_optional1: typing.Union[str, int],
+        not_optional2: typing.Union[str, int, bool],
+    ) -> None:
+        pass
+
+    parameters = inspect.signature(func).parameters
+
+    assert get_optional_arg(parameters['arg'].annotation) is None
+    assert get_optional_arg(parameters['optional'].annotation) is int
+    assert get_optional_arg(parameters['optional2'].annotation) is int
+    assert get_optional_arg(parameters['optional3'].annotation) is int
+    assert get_optional_arg(parameters['not_optional1'].annotation) is None
+    assert get_optional_arg(parameters['not_optional2'].annotation) is None
+
+
+@pytest.mark.skipif(sys.version_info < (3, 10), reason="Notation is only supported in Python 3.10 or newer.")
+def test_get_optional_arg_with_pipe_notation() -> None:
+    def func(optional: int | None, optional2: None | int) -> None:
+        pass
+
+    parameters = inspect.signature(func).parameters
+
+    assert get_optional_arg(parameters['optional'].annotation) is int
+    assert get_optional_arg(parameters['optional2'].annotation) is int
+
+
+def test_annotation_issubclass() -> None:
+    def func(
+        int_arg: int,
+        base_model: BaseModel,
+        list_arg: list,  # type: ignore[type-arg]  # what we test
+        dict_arg: dict,  # type: ignore[type-arg]  # what we test
+        list_typing_arg: typing.List,  # type: ignore[type-arg]  # what we test
+        dict_typing_arg: typing.Dict,  # type: ignore[type-arg]  # what we test
+        list_typing_generic_arg: typing.List[str],
+        dict_typing_generic_arg: typing.Dict[str, str],
+    ) -> None:
+        pass
+
+    parameters = inspect.signature(func).parameters
+    assert annotation_issubclass(parameters['int_arg'].annotation, int) is True
+    assert annotation_issubclass(parameters['base_model'].annotation, BaseModel) is True
+    assert annotation_issubclass(parameters['list_arg'].annotation, list) is True
+    assert annotation_issubclass(parameters['dict_arg'].annotation, dict) is True
+
+    # Here the annotation is simply not a class, so the function must return False
+    assert 
annotation_issubclass(parameters['list_typing_arg'].annotation, BaseModel) is False + assert annotation_issubclass(parameters['dict_typing_arg'].annotation, BaseModel) is False + assert annotation_issubclass(parameters['list_typing_generic_arg'].annotation, BaseModel) is False + assert annotation_issubclass(parameters['dict_typing_generic_arg'].annotation, BaseModel) is False + + +@pytest.mark.skipif(sys.version_info < (3, 9), reason="Notation is only supported in Python 3.9 or newer.") +def test_annotation_issubclass_with_generic_classes() -> None: + def func(list_arg: list[str], dict_arg: dict[str, str]) -> None: + pass + + parameters = inspect.signature(func).parameters + assert annotation_issubclass(parameters['list_arg'].annotation, list) is False + assert annotation_issubclass(parameters['dict_arg'].annotation, dict) is False + + # issubclass() behaves differently with BaseModel (and maybe other classes?). + assert annotation_issubclass(parameters['list_arg'].annotation, BaseModel) is False + assert annotation_issubclass(parameters['dict_arg'].annotation, BaseModel) is False From e124b99c4c0a7ff50f4424acd3b89fb963506d33 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 30 Sep 2024 21:21:46 +0300 Subject: [PATCH 0850/1051] Prepare for (pre) release: v5.5.0b4 (#9322) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Bump version: 5.5.0b3 → 5.5.0b4 * Added Changelog for v5.5.0b4 --- .bumpversion.cfg | 2 +- Changelog.rst | 143 +++++++++++++++++++++++++++++++++ README.rst | 2 +- celery/__init__.py | 2 +- docs/history/changelog-5.5.rst | 143 +++++++++++++++++++++++++++++++++ docs/includes/introduction.txt | 2 +- 6 files changed, 290 insertions(+), 4 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index c0fbfd093bc..97286770eb0 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 5.5.0b3 +current_version = 5.5.0b4 commit = True tag = True parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P[a-z\d]+)? diff --git a/Changelog.rst b/Changelog.rst index 6f2501d82e3..7d8d9769175 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -8,6 +8,149 @@ This document contains change notes for bugfix & new features in the main branch & 5.5.x series, please see :ref:`whatsnew-5.5` for an overview of what's new in Celery 5.5. +.. _version-5.5.0b4: + +5.5.0b4 +======= + +:release-date: 2024-09-30 +:release-by: Tomer Nosrati + +Celery v5.5.0 Beta 4 is now available for testing. +Please help us test this version and report any issues. + +Key Highlights +~~~~~~~~~~~~~~ + +Python 3.13 Initial Support +--------------------------- + +This release introduces the initial support for Python 3.13 with Celery. + +After upgrading to this version, please share your feedback on the Python 3.13 support. + +Previous Pre-release Highlights +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Soft Shutdown +------------- + +The soft shutdown is a new mechanism in Celery that sits between the warm shutdown and the cold shutdown. +It sets a time limited "warm shutdown" period, during which the worker will continue to process tasks that are already running. +After the soft shutdown ends, the worker will initiate a graceful cold shutdown, stopping all tasks and exiting. + +The soft shutdown is disabled by default, and can be enabled by setting the new configuration option :setting:`worker_soft_shutdown_timeout`. 
+If a worker is not running any task when the soft shutdown initiates, it will skip the warm shutdown period and proceed directly to the cold shutdown
+unless the new configuration option :setting:`worker_enable_soft_shutdown_on_idle` is set to True. This is useful for idle workers
+that are only waiting on ETA tasks to be executed, but still want to enable the soft shutdown anyway.
+
+The soft shutdown can replace the cold shutdown when using a broker with a visibility timeout mechanism, like :ref:`Redis `
+or :ref:`SQS `, to enable a more graceful cold shutdown procedure, allowing the worker enough time to re-queue tasks that were not
+completed (e.g., ``Restoring 1 unacknowledged message(s)``) by resetting the visibility timeout of the unacknowledged messages just before
+the worker exits completely.
+
+After upgrading to this version, please share your feedback on the new Soft Shutdown mechanism.
+
+Relevant Issues:
+`#9213 `_,
+`#9231 `_,
+`#9238 `_
+
+- New :ref:`documentation ` for each shutdown type.
+- New :setting:`worker_soft_shutdown_timeout` configuration option.
+- New :setting:`worker_enable_soft_shutdown_on_idle` configuration option.
+
+REMAP_SIGTERM
+-------------
+
+The ``REMAP_SIGTERM`` "hidden feature" has been tested, :ref:`documented ` and is now officially supported.
+This feature allows users to remap the SIGTERM signal to SIGQUIT, to initiate a soft or a cold shutdown using :sig:`TERM`
+instead of :sig:`QUIT`.
+
+Pydantic Support
+----------------
+
+This release introduces support for Pydantic models in Celery tasks.
+For more info, see the new pydantic example and PR `#9023 `_ by @mathiasertl.
+
+After upgrading to this version, please share your feedback on the new Pydantic support.
+
+Redis Broker Stability Improvements
+-----------------------------------
+The root cause of the Redis broker instability issue has been `identified and resolved `_
+in the v5.4.0 release of Kombu, which should resolve the disconnections bug and offer additional improvements.
+
+After upgrading to this version, please share your feedback on the Redis broker stability.
+
+Relevant Issues:
+`#7276 `_,
+`#8091 `_,
+`#8030 `_,
+`#8384 `_
+
+Quorum Queues Initial Support
+-----------------------------
+This release introduces the initial support for Quorum Queues with Celery.
+
+See new configuration options for more details:
+
+- :setting:`task_default_queue_type`
+- :setting:`worker_detect_quorum_queues`
+
+After upgrading to this version, please share your feedback on the Quorum Queues support.
+ +Relevant Issues: +`#6067 `_, +`#9121 `_ + +What's Changed +~~~~~~~~~~~~~~ + +- Correct the error description in exception message when validate soft_time_limit (#9246) +- Update msgpack to 1.1.0 (#9249) +- chore(utils/time.py): rename `_is_ambigious` -> `_is_ambiguous` (#9248) +- Reduced Smoke Tests to min/max supported python (3.8/3.12) (#9252) +- Update pytest to 8.3.3 (#9253) +- Update elasticsearch requirement from <=8.15.0 to <=8.15.1 (#9255) +- Update mongodb without deprecated `[srv]` extra requirement (#9258) +- blacksmith.sh: Migrate workflows to Blacksmith (#9261) +- Fixes #9119: inject dispatch_uid for retry-wrapped receivers (#9247) +- Run all smoke tests CI jobs together (#9263) +- Improve documentation on visibility timeout (#9264) +- Bump pytest-celery to 1.1.2 (#9267) +- Added missing "app.conf.visibility_timeout" in smoke tests (#9266) +- Improved stability with t/smoke/tests/test_consumer.py (#9268) +- Improved Redis container stability in the smoke tests (#9271) +- Disabled EXHAUST_MEMORY tests in Smoke-tasks (#9272) +- Marked xfail for test_reducing_prefetch_count with Redis - flaky test (#9273) +- Fixed pypy unit tests random failures in the CI (#9275) +- Fixed more pypy unit tests random failures in the CI (#9278) +- Fix Redis container from aborting randomly (#9276) +- Run Integration & Smoke CI tests together after unit tests pass (#9280) +- Added "loglevel verbose" to Redis containers in smoke tests (#9282) +- Fixed Redis error in the smoke tests: "Possible SECURITY ATTACK detected" (#9284) +- Refactored the smoke tests github workflow (#9285) +- Increased --reruns 3->4 in smoke tests (#9286) +- Improve stability of smoke tests (CI and Local) (#9287) +- Fixed Smoke tests CI "test-case" labels (specific instead of general) (#9288) +- Use assert_log_exists instead of wait_for_log in worker smoke tests (#9290) +- Optimized t/smoke/tests/test_worker.py (#9291) +- Enable smoke tests dockers check before each test starts (#9292) +- Relaxed smoke tests flaky tests mechanism (#9293) +- Updated quorum queue detection to handle multiple broker instances (#9294) +- Non-lazy table creation for database backend (#9228) +- Pin pymongo to latest version 4.9 (#9297) +- Bump pymongo from 4.9 to 4.9.1 (#9298) +- Bump Kombu to v5.4.2 (#9304) +- Use rabbitmq:3 in stamping smoke tests (#9307) +- Bump pytest-celery to 1.1.3 (#9308) +- Added Python 3.13 Support (#9309) +- Add log when global qos is disabled (#9296) +- Added official release docs (whatsnew) for v5.5 (#9312) +- Enable Codespell autofix (#9313) +- Pydantic typehints: Fix optional, allow generics (#9319) +- Prepare for (pre) release: v5.5.0b4 (#9322) + .. 
_version-5.5.0b3:
 
 5.5.0b3
diff --git a/README.rst b/README.rst
index dc2ffe4bd61..dc7e2bb1cad 100644
--- a/README.rst
+++ b/README.rst
@@ -2,7 +2,7 @@
 |build-status| |coverage| |license| |wheel| |semgrep| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge|
 
-:Version: 5.5.0b3 (immunity)
+:Version: 5.5.0b4 (immunity)
 :Web: https://docs.celeryq.dev/en/stable/index.html
 :Download: https://pypi.org/project/celery/
 :Source: https://github.com/celery/celery/
diff --git a/celery/__init__.py b/celery/__init__.py
index 187dfddb8d2..73587e59612 100644
--- a/celery/__init__.py
+++ b/celery/__init__.py
@@ -17,7 +17,7 @@
 
 SERIES = 'immunity'
 
-__version__ = '5.5.0b3'
+__version__ = '5.5.0b4'
 __author__ = 'Ask Solem'
 __contact__ = 'auvipy@gmail.com'
 __homepage__ = 'https://docs.celeryq.dev/'
diff --git a/docs/history/changelog-5.5.rst b/docs/history/changelog-5.5.rst
index 2a46ba0a417..49acb1235de 100644
--- a/docs/history/changelog-5.5.rst
+++ b/docs/history/changelog-5.5.rst
@@ -8,6 +8,149 @@ This document contains change notes for bugfix & new features in the main
 branch & 5.5.x series, please see :ref:`whatsnew-5.5` for
 an overview of what's new in Celery 5.5.
 
+.. _version-5.5.0b4:
+
+5.5.0b4
+=======
+
+:release-date: 2024-09-30
+:release-by: Tomer Nosrati
+
+Celery v5.5.0 Beta 4 is now available for testing.
+Please help us test this version and report any issues.
+
+Key Highlights
+~~~~~~~~~~~~~~
+
+Python 3.13 Initial Support
+---------------------------
+
+This release introduces the initial support for Python 3.13 with Celery.
+
+After upgrading to this version, please share your feedback on the Python 3.13 support.
+
+Previous Pre-release Highlights
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Soft Shutdown
+-------------
+
+The soft shutdown is a new mechanism in Celery that sits between the warm shutdown and the cold shutdown.
+It sets a time limited "warm shutdown" period, during which the worker will continue to process tasks that are already running.
+After the soft shutdown ends, the worker will initiate a graceful cold shutdown, stopping all tasks and exiting.
+
+The soft shutdown is disabled by default, and can be enabled by setting the new configuration option :setting:`worker_soft_shutdown_timeout`.
+If a worker is not running any task when the soft shutdown initiates, it will skip the warm shutdown period and proceed directly to the cold shutdown
+unless the new configuration option :setting:`worker_enable_soft_shutdown_on_idle` is set to True. This is useful for idle workers
+that are only waiting on ETA tasks to be executed, but still want to enable the soft shutdown anyway.
+
+The soft shutdown can replace the cold shutdown when using a broker with a visibility timeout mechanism, like :ref:`Redis `
+or :ref:`SQS `, to enable a more graceful cold shutdown procedure, allowing the worker enough time to re-queue tasks that were not
+completed (e.g., ``Restoring 1 unacknowledged message(s)``) by resetting the visibility timeout of the unacknowledged messages just before
+the worker exits completely.
+
+After upgrading to this version, please share your feedback on the new Soft Shutdown mechanism.
+
+Relevant Issues:
+`#9213 `_,
+`#9231 `_,
+`#9238 `_
+
+- New :ref:`documentation ` for each shutdown type.
+- New :setting:`worker_soft_shutdown_timeout` configuration option.
+- New :setting:`worker_enable_soft_shutdown_on_idle` configuration option.
+
+REMAP_SIGTERM
+-------------
+
+The ``REMAP_SIGTERM`` "hidden feature" has been tested, :ref:`documented ` and is now officially supported.
+This feature allows users to remap the SIGTERM signal to SIGQUIT, to initiate a soft or a cold shutdown using :sig:`TERM` +instead of :sig:`QUIT`. + +Pydantic Support +---------------- + +This release introduces support for Pydantic models in Celery tasks. +For more info, see the new pydantic example and PR `#9023 `_ by @mathiasertl. + +After upgrading to this version, please share your feedback on the new Pydantic support. + +Redis Broker Stability Improvements +----------------------------------- +The root cause of the Redis broker instability issue has been `identified and resolved `_ +in the v5.4.0 release of Kombu, which should resolve the disconnections bug and offer additional improvements. + +After upgrading to this version, please share your feedback on the Redis broker stability. + +Relevant Issues: +`#7276 `_, +`#8091 `_, +`#8030 `_, +`#8384 `_ + +Quorum Queues Initial Support +----------------------------- +This release introduces the initial support for Quorum Queues with Celery. + +See new configuration options for more details: + +- :setting:`task_default_queue_type` +- :setting:`worker_detect_quorum_queues` + +After upgrading to this version, please share your feedback on the Quorum Queues support. + +Relevant Issues: +`#6067 `_, +`#9121 `_ + +What's Changed +~~~~~~~~~~~~~~ + +- Correct the error description in exception message when validate soft_time_limit (#9246) +- Update msgpack to 1.1.0 (#9249) +- chore(utils/time.py): rename `_is_ambigious` -> `_is_ambiguous` (#9248) +- Reduced Smoke Tests to min/max supported python (3.8/3.12) (#9252) +- Update pytest to 8.3.3 (#9253) +- Update elasticsearch requirement from <=8.15.0 to <=8.15.1 (#9255) +- Update mongodb without deprecated `[srv]` extra requirement (#9258) +- blacksmith.sh: Migrate workflows to Blacksmith (#9261) +- Fixes #9119: inject dispatch_uid for retry-wrapped receivers (#9247) +- Run all smoke tests CI jobs together (#9263) +- Improve documentation on visibility timeout (#9264) +- Bump pytest-celery to 1.1.2 (#9267) +- Added missing "app.conf.visibility_timeout" in smoke tests (#9266) +- Improved stability with t/smoke/tests/test_consumer.py (#9268) +- Improved Redis container stability in the smoke tests (#9271) +- Disabled EXHAUST_MEMORY tests in Smoke-tasks (#9272) +- Marked xfail for test_reducing_prefetch_count with Redis - flaky test (#9273) +- Fixed pypy unit tests random failures in the CI (#9275) +- Fixed more pypy unit tests random failures in the CI (#9278) +- Fix Redis container from aborting randomly (#9276) +- Run Integration & Smoke CI tests together after unit tests pass (#9280) +- Added "loglevel verbose" to Redis containers in smoke tests (#9282) +- Fixed Redis error in the smoke tests: "Possible SECURITY ATTACK detected" (#9284) +- Refactored the smoke tests github workflow (#9285) +- Increased --reruns 3->4 in smoke tests (#9286) +- Improve stability of smoke tests (CI and Local) (#9287) +- Fixed Smoke tests CI "test-case" labels (specific instead of general) (#9288) +- Use assert_log_exists instead of wait_for_log in worker smoke tests (#9290) +- Optimized t/smoke/tests/test_worker.py (#9291) +- Enable smoke tests dockers check before each test starts (#9292) +- Relaxed smoke tests flaky tests mechanism (#9293) +- Updated quorum queue detection to handle multiple broker instances (#9294) +- Non-lazy table creation for database backend (#9228) +- Pin pymongo to latest version 4.9 (#9297) +- Bump pymongo from 4.9 to 4.9.1 (#9298) +- Bump Kombu to v5.4.2 (#9304) +- Use rabbitmq:3 in 
stamping smoke tests (#9307) +- Bump pytest-celery to 1.1.3 (#9308) +- Added Python 3.13 Support (#9309) +- Add log when global qos is disabled (#9296) +- Added official release docs (whatsnew) for v5.5 (#9312) +- Enable Codespell autofix (#9313) +- Pydantic typehints: Fix optional, allow generics (#9319) +- Prepare for (pre) release: v5.5.0b4 (#9322) + .. _version-5.5.0b3: 5.5.0b3 diff --git a/docs/includes/introduction.txt b/docs/includes/introduction.txt index cfb8a08c2f7..48013e2c369 100644 --- a/docs/includes/introduction.txt +++ b/docs/includes/introduction.txt @@ -1,4 +1,4 @@ -:Version: 5.5.0b3 (immunity) +:Version: 5.5.0b4 (immunity) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ From 341334aea700e16c858d781a6eb2d6a3f6813e12 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 30 Sep 2024 22:00:10 +0300 Subject: [PATCH 0851/1051] Added Blacksmith.sh to the Sponsors section in the README (#9323) --- README.rst | 10 ++++++++-- docs/images/blacksmith.png | Bin 0 -> 892 bytes 2 files changed, 8 insertions(+), 2 deletions(-) create mode 100644 docs/images/blacksmith.png diff --git a/README.rst b/README.rst index dc7e2bb1cad..02928c52f0d 100644 --- a/README.rst +++ b/README.rst @@ -494,12 +494,18 @@ link to your website. [`Become a sponsor`_] .. _`Become a sponsor`: https://opencollective.com/celery#sponsor -|oc-sponsor-1| |oc-sponsor-2| +|oc-sponsor-1| |oc-sponsor-2| |oc-sponsor-3| .. |oc-sponsor-1| image:: https://opencollective.com/celery/sponsor/0/avatar.svg :target: https://opencollective.com/celery/sponsor/0/website -.. |oc-sponsor-2| image:: https://upstash.com/logo/upstash-dark-bg.svg +.. |oc-sponsor-2| image:: https://docs.celeryq.dev/en/latest/_images/blacksmith.png + :target: https://www.blacksmith.sh/ + :alt: Blacksmith.sh + :width: 200 + :height: 57 + +.. |oc-sponsor-3| image:: https://upstash.com/logo/upstash-dark-bg.svg :target: http://upstash.com/?code=celery :alt: Upstash :width: 200 diff --git a/docs/images/blacksmith.png b/docs/images/blacksmith.png new file mode 100644 index 0000000000000000000000000000000000000000..95971e26ea336a503f6bd627a00789781fdc6028 GIT binary patch literal 892 zcmeAS@N?(olHy`uVBq!ia0y~yVAKI&4rZW;-{LxVASD{$6XFV_|NsBr)zx+W{Q0$O z*ZTPQJbU)c+~$=kP=d3-BeIx*K@7+iQ)ToFQu)BZz|8CE;uumf=k1;7GA2VFmOxLr z>i^zpj6W57d8g=Ji*4Io~~V z*5$m9Kl<*{t!o=+W#66|R$gcwzxB}3w>Iq$UzzoN-C=d?lx5hC9UBYRynD3m-{Pl_ zw(WbExi0th@44?TnccTG3E%d!YUl0^Z^fg|mgfGyvuO7k=I^J@hRXhMShITtbK=aL zD0jQew|XxCGoYT7G=);V>Y{U zX{m{hu=neXh-Fi+J>1}1$85%2eezopQ-bVA{$hQM+BMvYr8wWR*M>?Ja7slRq$#5CKrA&HTnwgk1N?!4?JHf^b& zc4Wjf%e5tmk&m`qYE7-Y>?JWR_3ziq=i=v`kUx@VY3iSza`)7yxksjzYkPkynU(B6 z?`M>aZmYUcI4dZm59C+b+~4(QZejhYn5U_Gv~#wfyXtj!<$BY)Td&S-z11ImcSl%w zW!>6OihB&#eh|;|&fX*WdefFiSHq?+HNDqc8XEPnX!Yt#Qu)VsX{@c-d(}0w_j_{e zGjgj7q6}+SAIj|gnLO23TP!EkZ0*LRHH$Kj zPFmZw%=B7MsL|SpEukm399y-KYnkZjZCW#~MVN=}+>-P#?CB*b?c-Y0!gTJgiik>n z8?p3K*U{*yUfFBpRtH4=y`L2|4P~y)R|GjJe m{r6CwSIt5w{W35#{Qb(vSK>OYeQv2dNVTV{pUXO@geCyLgpyDI literal 0 HcmV?d00001 From d589d8d843e43e34afcc4288615497997fd0a46c Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 30 Sep 2024 22:19:29 +0300 Subject: [PATCH 0852/1051] Revert "Added Blacksmith.sh to the Sponsors section in the README (#9323)" (#9324) This reverts commit 341334aea700e16c858d781a6eb2d6a3f6813e12. 
--- README.rst | 10 ++-------- docs/images/blacksmith.png | Bin 892 -> 0 bytes 2 files changed, 2 insertions(+), 8 deletions(-) delete mode 100644 docs/images/blacksmith.png diff --git a/README.rst b/README.rst index 02928c52f0d..dc7e2bb1cad 100644 --- a/README.rst +++ b/README.rst @@ -494,18 +494,12 @@ link to your website. [`Become a sponsor`_] .. _`Become a sponsor`: https://opencollective.com/celery#sponsor -|oc-sponsor-1| |oc-sponsor-2| |oc-sponsor-3| +|oc-sponsor-1| |oc-sponsor-2| .. |oc-sponsor-1| image:: https://opencollective.com/celery/sponsor/0/avatar.svg :target: https://opencollective.com/celery/sponsor/0/website -.. |oc-sponsor-2| image:: https://docs.celeryq.dev/en/latest/_images/blacksmith.png - :target: https://www.blacksmith.sh/ - :alt: Blacksmith.sh - :width: 200 - :height: 57 - -.. |oc-sponsor-3| image:: https://upstash.com/logo/upstash-dark-bg.svg +.. |oc-sponsor-2| image:: https://upstash.com/logo/upstash-dark-bg.svg :target: http://upstash.com/?code=celery :alt: Upstash :width: 200 diff --git a/docs/images/blacksmith.png b/docs/images/blacksmith.png deleted file mode 100644 index 95971e26ea336a503f6bd627a00789781fdc6028..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 892 zcmeAS@N?(olHy`uVBq!ia0y~yVAKI&4rZW;-{LxVASD{$6XFV_|NsBr)zx+W{Q0$O z*ZTPQJbU)c+~$=kP=d3-BeIx*K@7+iQ)ToFQu)BZz|8CE;uumf=k1;7GA2VFmOxLr z>i^zpj6W57d8g=Ji*4Io~~V z*5$m9Kl<*{t!o=+W#66|R$gcwzxB}3w>Iq$UzzoN-C=d?lx5hC9UBYRynD3m-{Pl_ zw(WbExi0th@44?TnccTG3E%d!YUl0^Z^fg|mgfGyvuO7k=I^J@hRXhMShITtbK=aL zD0jQew|XxCGoYT7G=);V>Y{U zX{m{hu=neXh-Fi+J>1}1$85%2eezopQ-bVA{$hQM+BMvYr8wWR*M>?Ja7slRq$#5CKrA&HTnwgk1N?!4?JHf^b& zc4Wjf%e5tmk&m`qYE7-Y>?JWR_3ziq=i=v`kUx@VY3iSza`)7yxksjzYkPkynU(B6 z?`M>aZmYUcI4dZm59C+b+~4(QZejhYn5U_Gv~#wfyXtj!<$BY)Td&S-z11ImcSl%w zW!>6OihB&#eh|;|&fX*WdefFiSHq?+HNDqc8XEPnX!Yt#Qu)VsX{@c-d(}0w_j_{e zGjgj7q6}+SAIj|gnLO23TP!EkZ0*LRHH$Kj zPFmZw%=B7MsL|SpEukm399y-KYnkZjZCW#~MVN=}+>-P#?CB*b?c-Y0!gTJgiik>n z8?p3K*U{*yUfFBpRtH4=y`L2|4P~y)R|GjJe m{r6CwSIt5w{W35#{Qb(vSK>OYeQv2dNVTV{pUXO@geCyLgpyDI From a1afec48e2f788e0725fea02945710db701a8094 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 30 Sep 2024 22:26:52 +0300 Subject: [PATCH 0853/1051] Added Blacksmith.sh to the Sponsors section in the README (#9325) --- README.rst | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/README.rst b/README.rst index dc7e2bb1cad..2c2e3220b06 100644 --- a/README.rst +++ b/README.rst @@ -499,7 +499,13 @@ link to your website. [`Become a sponsor`_] .. |oc-sponsor-1| image:: https://opencollective.com/celery/sponsor/0/avatar.svg :target: https://opencollective.com/celery/sponsor/0/website -.. |oc-sponsor-2| image:: https://upstash.com/logo/upstash-dark-bg.svg +.. |oc-sponsor-2| image:: https://cdn.prod.website-files.com/666867b039e0f3d7fb777efa/666c7b31dc41f7f25b721378_blacksmith-logo.svg + :target: https://www.blacksmith.sh/ + :alt: Blacksmith.sh + :width: 240 + :height: 57 + +.. 
|oc-sponsor-3| image:: https://upstash.com/logo/upstash-dark-bg.svg :target: http://upstash.com/?code=celery :alt: Upstash :width: 200 From 58569f1c917d6e07e7601365823480764aa92829 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 30 Sep 2024 22:28:59 +0300 Subject: [PATCH 0854/1051] =?UTF-8?q?Added=20missing=20"=20|oc-sponsor-3|?= =?UTF-8?q?=E2=80=9D=20in=20README=20(#9326)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 2c2e3220b06..948dac52c31 100644 --- a/README.rst +++ b/README.rst @@ -494,7 +494,7 @@ link to your website. [`Become a sponsor`_] .. _`Become a sponsor`: https://opencollective.com/celery#sponsor -|oc-sponsor-1| |oc-sponsor-2| +|oc-sponsor-1| |oc-sponsor-2| |oc-sponsor-3| .. |oc-sponsor-1| image:: https://opencollective.com/celery/sponsor/0/avatar.svg :target: https://opencollective.com/celery/sponsor/0/website From 70160a435f82aa80a0d1747b6a0833760ecbf94e Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 1 Oct 2024 01:25:35 +0300 Subject: [PATCH 0855/1051] Use Blacksmith SVG logo (#9327) --- README.rst | 2 +- docs/images/blacksmith-logo-padded.svg | 15 +++++++++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) create mode 100644 docs/images/blacksmith-logo-padded.svg diff --git a/README.rst b/README.rst index 948dac52c31..126d922a0be 100644 --- a/README.rst +++ b/README.rst @@ -499,7 +499,7 @@ link to your website. [`Become a sponsor`_] .. |oc-sponsor-1| image:: https://opencollective.com/celery/sponsor/0/avatar.svg :target: https://opencollective.com/celery/sponsor/0/website -.. |oc-sponsor-2| image:: https://cdn.prod.website-files.com/666867b039e0f3d7fb777efa/666c7b31dc41f7f25b721378_blacksmith-logo.svg +.. |oc-sponsor-2| image:: ./docs/images/blacksmith-logo-padded.svg :target: https://www.blacksmith.sh/ :alt: Blacksmith.sh :width: 240 diff --git a/docs/images/blacksmith-logo-padded.svg b/docs/images/blacksmith-logo-padded.svg new file mode 100644 index 00000000000..849fe48fdc9 --- /dev/null +++ b/docs/images/blacksmith-logo-padded.svg @@ -0,0 +1,15 @@ + + + + + + + + + + + + + + + From 82564dafe01882659436e44a94fe4fba553e909a Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 1 Oct 2024 01:53:05 +0300 Subject: [PATCH 0856/1051] Updated Blacksmith SVG logo (#9328) --- README.rst | 2 +- docs/images/blacksmith-logo-padded.svg | 15 --------------- docs/images/blacksmith-logo-white-on-black.svg | Bin 0 -> 5926 bytes 3 files changed, 1 insertion(+), 16 deletions(-) delete mode 100644 docs/images/blacksmith-logo-padded.svg create mode 100644 docs/images/blacksmith-logo-white-on-black.svg diff --git a/README.rst b/README.rst index 126d922a0be..ef7c2c0c9a5 100644 --- a/README.rst +++ b/README.rst @@ -499,7 +499,7 @@ link to your website. [`Become a sponsor`_] .. |oc-sponsor-1| image:: https://opencollective.com/celery/sponsor/0/avatar.svg :target: https://opencollective.com/celery/sponsor/0/website -.. |oc-sponsor-2| image:: ./docs/images/blacksmith-logo-padded.svg +.. 
|oc-sponsor-2| image:: ./docs/images/blacksmith-logo-white-on-black.svg :target: https://www.blacksmith.sh/ :alt: Blacksmith.sh :width: 240 diff --git a/docs/images/blacksmith-logo-padded.svg b/docs/images/blacksmith-logo-padded.svg deleted file mode 100644 index 849fe48fdc9..00000000000 --- a/docs/images/blacksmith-logo-padded.svg +++ /dev/null @@ -1,15 +0,0 @@ - - - - - - - - - - - - - - - diff --git a/docs/images/blacksmith-logo-white-on-black.svg b/docs/images/blacksmith-logo-white-on-black.svg new file mode 100644 index 0000000000000000000000000000000000000000..9bc513bfa5e8eb89fca2b3e172783b1246fb279e GIT binary patch literal 5926 zcmdUT_d6S2^nZ$~U881=qV}qHshUNNqE@URNU6O_tWuO#jlQjtP(>9%N+NbiVvpLY z#HPHBnjxr);*)-#?;r8`{BmFS-sj%eJ?A{KN(BG0(L&Ab)(i4>fQ%#> z8Fsk0fMr=fTwb(I4DA(Tc!8oXjE0co5;LP2 zdx4{7m1Uz?;`W!87GwFVKF!a(7UB=x!?|RYsZO2zyiIZ9ZI>5OC#1E(y~#>D0RSL- z`FI11!|lYJuI0#IOSKbx^6PN)K{!SrObk3TguHphEX0(B#-iq z^0C|TwE=Ho8hZlZbYfMkcK=BRK5eB1ywsqmP%S;$**cTjPCn=pQqMIg^L>fGmFW7C z>+58vO#YGV%jwj1X~n_9o2zU)^fX-c-P?TwmdubH1IQD&=|jOQfNWtGLUyR}3Iih- z50(uSxYW7}$x{hjq@#(+!uPL+YcMVR_wMbbFr{-{UC&VC}0K zg83mkFt~Z_pN4K&nmZ%`@P@*TjF)-#b?wxSa?V8}39>RhUHdeUeQj<-scYo{F3(5i zz=hG4+e`rAnk7nNhdAz-L6+Yh2g2)}Vfj|2Iv&-8d(4HObJ7Jh$$dJ6SM;2_*^_2t zVu$5Iv4MO;^q_;OHb+Sp0q~#S2X0e!dF`tXBe1|=c+CEf4yQtSCvISPQyxU{3`N>n zSy`#;kB>QA?n)e+yozliQp7kn%w=gS*8nD=eW8cm|3;ncg|aQ3bc<50VF$FMwcF-7 z$sF-9_W3f2_yo5Op&x;1oNW$TQ9Nh6HNt{v)I-b1+f#|@LJtHl{v3{u##CMsr(9w+ z=NA_jHE$MhnQJ~jSm~(>lq{h5raF$o&YG}@_>&|*ic|tSITgs3uv@PWoHZW+XG$ge zB@mN?K~z~z8zS5Bvwia|MPV>q_j^swl^5A?{0azoO)gf1%wn!3^`JLq=^Zj+$S84g zX+GFOsJgaVRS>1*CtbV8I+N@Vds5CLpurSdLMz>2tp+k=l05%Xnk$OAVIC^mVBtKm zgDFm!%xD$}V2*={qOH^ML>o2VUq+uj5xXLG5CN1;-F5@1)9P2G!BW}}GMS7v%ONyD zxj>o6Cqu6ZWg^=wpcOrJXD4N#pbXodu?omqTcASZ9 z2kBg(oSd9%3hYgb9S%g47%`cf%`=pJ}d;N+7KU=Nszbj<2QOKvY{2I%r!}s!b~cY&g29?3 z{meOW4QezELYLVrxhn6p?2qO14T{h=Kn>f0M%X3LwDr@jo88XdPvVbnv3+;_j1OY@aH96=5f$`I%MbFw2HlkI$O+7lUBhpO4~TGGmQvrDlv!wv)u z**YT^&G0U{|UjHxLZ=mgclLip=w$AaSh!mHnsKM#kY>-)`9}gKJf9AJ47+Jtwjf9`#k&9 zUvA>X23tN~cRtwaA8e*Z%j$?@Y$T2XhfM{=x5QS@id(zm^S?W@f-Vi8qcrTES86$6 z`fcc)-){GImWeptFdja3EM5CG1)R7)d8eFK?%Z@Vn@*nSCut0Mo};|J2O+KNqr0Ng z8W15rH6Sat zbvz9IwASD{%n5>S*Gi%k*w*B58@aITb1-)z>R9b#-wpVq<}hiXa?f$FUtA% z%=+}Q=80N{VEg^OkipGWsM;LZWCR)beDGTqt@$aly5D*D2mm}@ra?@fKHhX9m`*Ow zUr}iikU>E+y@q*TKxxARqf|ZPl*@@Dyx|*yJGe#+B?9)2!4z9QFHcVs(V%I@Fy-!9 zg%CohNS|5lPckjSbSj?T(|m8+#CoLK8UMLZ!tJDw*^<1`t3PG>Q`64QE^GApid*U8 zy47j zMgNzq>s}F=VU;tvmq2QzfprD_7(VS^P}bYp&~*RwXYP>G2`VK8?dnC8?1yLy+1pKZ z`b-KvS?Dk+bpeeGS=TKh!}UR!`WM-{b!`<#Q7EoOt6IEXVcpt60Xvdpl}kF|xzO^I zst*yDlX;t1fjgRbV9#2-)xj_GJUMVAzcH&Ws4EK%ZGh;1DTsg%r)tV^6sBHfgzMDh z{LO`yt3!_!6cmUat;!0THe{tPGfj>JSuQKvn_YKBPn?s`C%K#=RCtpdsq*n6QkGaS z!`eAWjT{6=n1%K|Oz!F4cb?^USG0_ltf@UdXuKZnInq=%;KO8ZW_;lATQ6<$BtHp* zV{DLU@H9WOu-9{9YNfr0prhZW7ZhP>Hhf}v_4ATWe_f_+ZB0YXd|HtPp`B4q8&iQoHNSM)kiJ@fa z{Z(VQOtrFc=;w)GcS%9Pw;3z2+7T3eL$m5dc<)?O;KFF+-0vHU_lf-qDh#I>gO!Gu zJ)|s1M7BHs*#<;>2SvI#KkL6@5nG)Ak9`_!7Pon+wD_W*mlc}P4oqs#f425WCaB~R z6$|Zwh0`l0%+`C}eUsm+d}XW&#;u%FyTLAueogBA9%N1iU*X4y>9}eZ+3>ytUR`P%84=!e|A}^gC zUU-)W(=5h}j0@;kFFs%4fSH@Pg(~Jvl!En+0)4Nd-0dakRbkQFSMu8f&0HS}l0O;8Z%(l}ol7!2Qiu%~WHx z`{SHB==^?yz3Wjmt(0T!TlVWxGUmQDb$WO!?BgB24b>xrS3Jv=hoR=TmScugJCl<(ue({&S)MVNFW`=I)+y_{x_b)j< zSTFpnWqTnv_NnZ%Csbg}YVKzVsc##y8>DtZOY~qd=d((iMVwBI_d5wU^q2Pba+zNw zqmI-^Fvzd+Tn^xj8B*SduXlFZ9g_8lX(;4jb{TEwyAVk;<%BOeba9+DZ+(-=5<9nJ zi6aO;@8cMC2Ij{Ijt|H%*7qulF-o=9lPOt`gluTfGd47hd(|$I&m>1XjB=rq|CZCH(s415KZR(>pnE9|&CY5&o|ctErKCy7MD zhyx2sC6*d*VKgL~qjI)=WH}w?l5j`UZ$~Z%Qa3D^49v-JR?s5u1RkCG!a)~O;KSX} zlPXa_S9O#si 
znz?gEg{P#agaD%QlB05W-G-?u4YcHhF*nGq;SJ(E&71RC?XKKUsCH>m<~s{?mr_~^ zi#TT$-5pnsiIgy;TW$MZ#K*93#kWs<72b3rV%61H{&>SG=iU>Uoc@x)g>EI%=RF`Z+)y;yD{2qy?KkWH9eH{d0E+t#! z|N7F21H;()nZE6t$Uq+DdsRE`$rdpm))P51%}wOkex={{UM{S)R{MWt+;_}s*qPjK zSA^Q&EDJHZ4Jif07MUTsL)is=d zpOaqiT-R*Dd}3C9^J)F-{re4FjbR0b5VpNeKK`7kfF5J|{Mpn=^(=FGycL&ldRMvK z4}Eya$xd~4JBdJafb-iMDDl zAwwqk!zmxf#vki)$fTzH=(wrhFtK%X;)YbF(mmgk?%3q-WBAT)Q4EgSJhd(ZN9DPt ziJ&s+@QuEq3J0af=IaGNjftd1S!+Hvp6G0VUl)7ARUBRB~Pdi_@9D*+Q?AJ)UYsgzFf%bj};9 zm*a9WE14t1QzFB%^{0?YPJY(~HFE!)S>6a6TiE5xca=FUy?6iWngBJGul6>y8o0_& zjWMX#*NB|k{kF9lX*Rc0PHQ=O`Vd(M2kOC{>ZeCk<#pG1GP&>%J&6$&cz>{=M+ByX zMdQWqec{u`-HJD(MFzxdMmHK?;V~gv`ja2Bzn0_Nv?0u(k{+*N+o~H|f1uY_@>j^@ z*@fR$)D`7*d6B(Pc=ou^lbW%(H_ETuM&6K$LLNlz2zNzez61;B6RH+G>BH3 zX{`3_i-i|YDlH0dXHoavS_gRa9wgtVVZ+vI8e`QP+;_;ZaX2+i~>I`ZZxDA z%@5m0+VDob4);Ow!~4|xE#EisFt(DnDp}UMY*aM~FV;E)_mXPe#Y2CosHlWrrXdE= zEZ=_QW4j4HFKz#b)oPf)*Cpkbb83Tss;Yk9QbE{0I1l()`D<8ZDSs%i$8aq(_{DC? z7b%dS7yZDZ{}eWRu`WmSw}=IqQzL!?YWL}49j`FHrQc@y_*3|n%79}a*2|swbui8? z-9FyNe6cO|fvS9k6FkjZmaX??`_`v{iWF>Ffm`vUUn}Q;%b?pfjchJPx&VRf>TeGB zm#?*r5GaC)YgR&7*3VhS#tZmz&veBZl4(y#tF3h|aJF24e`*!TIMMJ1Vo%!oy|h5B zRwO1>nSt!zX3B%uEk-D_4)pUzk&oROj$eV~bFRiIYYU*@cL~AErD|U(-&8cbvLY*z zv}$DK`;k_`J|Wplx3Q~!DS37iblzr~n#@;lYb4>^`qUC5a2-UHMccn5p9 z`t24<;^v*}_N9b-&56%{yi(RK7(YKdv8%aVK9fixm%UEtrHP+XkY;Bdk{hT{xTV>5 zW!06M#C~kLA|3w9C`7{_IrhqvVzj;Ez7&t-e%c|?a8br8Az6@wMfvSmPU1UL=G!;m ztc>5hUiKICY!s8mA+^>q@9!YJ&v_Xh)#dBEaWIAcQeKQ@u=mQT1v4jurEjp?OJm`N zC|RlOPyBPYEU|5`8~N#8zsu-E=N*8(fhlN!6yEI7$vnbh9#6E=z;Rn_6m8p?Arwy> zn@>x9Ak7&KK-%}f_I@gTSbIoRdCTj^QD+Sen9N(<+ zVB9ob{l}Vj18g^8*k<|OQ8N?cYV=Gf_Yy3#WL9xa3uN)wCqqN|*-sUQu!Mg{4!>P$ zHGHsWv%4nRCc29`D15iuQbaqNJo^6hX!E-hct`u|yb?=6T_5dP zb3y0>%`m^0hz&~r$YpekPgtf6$d=h@6@&0@)ESAogC+@YI7ZsW1guZ{9e2IZN<5vZ z7X6%+NnJgjH(=P%X1kQ~r_kd3ENar5&s@ONltuJh8&z$+VBvyvv)*YbY}g4-^uG9h zj<==5+>d^8N8=hD001%h|J~V+YyamvxRhofpySVV!nWdNi#`9Nze^*1Gd;8}B>sN@ D8Mh`c literal 0 HcmV?d00001 From ef79625a1a3f4202e8f0f091eede09148d7a5541 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 1 Oct 2024 02:01:04 +0300 Subject: [PATCH 0857/1051] Revert "Updated Blacksmith SVG logo (#9328)" (#9329) This reverts commit 82564dafe01882659436e44a94fe4fba553e909a. --- README.rst | 2 +- docs/images/blacksmith-logo-padded.svg | 15 +++++++++++++++ docs/images/blacksmith-logo-white-on-black.svg | Bin 5926 -> 0 bytes 3 files changed, 16 insertions(+), 1 deletion(-) create mode 100644 docs/images/blacksmith-logo-padded.svg delete mode 100644 docs/images/blacksmith-logo-white-on-black.svg diff --git a/README.rst b/README.rst index ef7c2c0c9a5..126d922a0be 100644 --- a/README.rst +++ b/README.rst @@ -499,7 +499,7 @@ link to your website. [`Become a sponsor`_] .. |oc-sponsor-1| image:: https://opencollective.com/celery/sponsor/0/avatar.svg :target: https://opencollective.com/celery/sponsor/0/website -.. |oc-sponsor-2| image:: ./docs/images/blacksmith-logo-white-on-black.svg +.. 
|oc-sponsor-2| image:: ./docs/images/blacksmith-logo-padded.svg :target: https://www.blacksmith.sh/ :alt: Blacksmith.sh :width: 240 diff --git a/docs/images/blacksmith-logo-padded.svg b/docs/images/blacksmith-logo-padded.svg new file mode 100644 index 00000000000..849fe48fdc9 --- /dev/null +++ b/docs/images/blacksmith-logo-padded.svg @@ -0,0 +1,15 @@ + + + + + + + + + + + + + + + diff --git a/docs/images/blacksmith-logo-white-on-black.svg b/docs/images/blacksmith-logo-white-on-black.svg deleted file mode 100644 index 9bc513bfa5e8eb89fca2b3e172783b1246fb279e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 5926 zcmdUT_d6S2^nZ$~U881=qV}qHshUNNqE@URNU6O_tWuO#jlQjtP(>9%N+NbiVvpLY z#HPHBnjxr);*)-#?;r8`{BmFS-sj%eJ?A{KN(BG0(L&Ab)(i4>fQ%#> z8Fsk0fMr=fTwb(I4DA(Tc!8oXjE0co5;LP2 zdx4{7m1Uz?;`W!87GwFVKF!a(7UB=x!?|RYsZO2zyiIZ9ZI>5OC#1E(y~#>D0RSL- z`FI11!|lYJuI0#IOSKbx^6PN)K{!SrObk3TguHphEX0(B#-iq z^0C|TwE=Ho8hZlZbYfMkcK=BRK5eB1ywsqmP%S;$**cTjPCn=pQqMIg^L>fGmFW7C z>+58vO#YGV%jwj1X~n_9o2zU)^fX-c-P?TwmdubH1IQD&=|jOQfNWtGLUyR}3Iih- z50(uSxYW7}$x{hjq@#(+!uPL+YcMVR_wMbbFr{-{UC&VC}0K zg83mkFt~Z_pN4K&nmZ%`@P@*TjF)-#b?wxSa?V8}39>RhUHdeUeQj<-scYo{F3(5i zz=hG4+e`rAnk7nNhdAz-L6+Yh2g2)}Vfj|2Iv&-8d(4HObJ7Jh$$dJ6SM;2_*^_2t zVu$5Iv4MO;^q_;OHb+Sp0q~#S2X0e!dF`tXBe1|=c+CEf4yQtSCvISPQyxU{3`N>n zSy`#;kB>QA?n)e+yozliQp7kn%w=gS*8nD=eW8cm|3;ncg|aQ3bc<50VF$FMwcF-7 z$sF-9_W3f2_yo5Op&x;1oNW$TQ9Nh6HNt{v)I-b1+f#|@LJtHl{v3{u##CMsr(9w+ z=NA_jHE$MhnQJ~jSm~(>lq{h5raF$o&YG}@_>&|*ic|tSITgs3uv@PWoHZW+XG$ge zB@mN?K~z~z8zS5Bvwia|MPV>q_j^swl^5A?{0azoO)gf1%wn!3^`JLq=^Zj+$S84g zX+GFOsJgaVRS>1*CtbV8I+N@Vds5CLpurSdLMz>2tp+k=l05%Xnk$OAVIC^mVBtKm zgDFm!%xD$}V2*={qOH^ML>o2VUq+uj5xXLG5CN1;-F5@1)9P2G!BW}}GMS7v%ONyD zxj>o6Cqu6ZWg^=wpcOrJXD4N#pbXodu?omqTcASZ9 z2kBg(oSd9%3hYgb9S%g47%`cf%`=pJ}d;N+7KU=Nszbj<2QOKvY{2I%r!}s!b~cY&g29?3 z{meOW4QezELYLVrxhn6p?2qO14T{h=Kn>f0M%X3LwDr@jo88XdPvVbnv3+;_j1OY@aH96=5f$`I%MbFw2HlkI$O+7lUBhpO4~TGGmQvrDlv!wv)u z**YT^&G0U{|UjHxLZ=mgclLip=w$AaSh!mHnsKM#kY>-)`9}gKJf9AJ47+Jtwjf9`#k&9 zUvA>X23tN~cRtwaA8e*Z%j$?@Y$T2XhfM{=x5QS@id(zm^S?W@f-Vi8qcrTES86$6 z`fcc)-){GImWeptFdja3EM5CG1)R7)d8eFK?%Z@Vn@*nSCut0Mo};|J2O+KNqr0Ng z8W15rH6Sat zbvz9IwASD{%n5>S*Gi%k*w*B58@aITb1-)z>R9b#-wpVq<}hiXa?f$FUtA% z%=+}Q=80N{VEg^OkipGWsM;LZWCR)beDGTqt@$aly5D*D2mm}@ra?@fKHhX9m`*Ow zUr}iikU>E+y@q*TKxxARqf|ZPl*@@Dyx|*yJGe#+B?9)2!4z9QFHcVs(V%I@Fy-!9 zg%CohNS|5lPckjSbSj?T(|m8+#CoLK8UMLZ!tJDw*^<1`t3PG>Q`64QE^GApid*U8 zy47j zMgNzq>s}F=VU;tvmq2QzfprD_7(VS^P}bYp&~*RwXYP>G2`VK8?dnC8?1yLy+1pKZ z`b-KvS?Dk+bpeeGS=TKh!}UR!`WM-{b!`<#Q7EoOt6IEXVcpt60Xvdpl}kF|xzO^I zst*yDlX;t1fjgRbV9#2-)xj_GJUMVAzcH&Ws4EK%ZGh;1DTsg%r)tV^6sBHfgzMDh z{LO`yt3!_!6cmUat;!0THe{tPGfj>JSuQKvn_YKBPn?s`C%K#=RCtpdsq*n6QkGaS z!`eAWjT{6=n1%K|Oz!F4cb?^USG0_ltf@UdXuKZnInq=%;KO8ZW_;lATQ6<$BtHp* zV{DLU@H9WOu-9{9YNfr0prhZW7ZhP>Hhf}v_4ATWe_f_+ZB0YXd|HtPp`B4q8&iQoHNSM)kiJ@fa z{Z(VQOtrFc=;w)GcS%9Pw;3z2+7T3eL$m5dc<)?O;KFF+-0vHU_lf-qDh#I>gO!Gu zJ)|s1M7BHs*#<;>2SvI#KkL6@5nG)Ak9`_!7Pon+wD_W*mlc}P4oqs#f425WCaB~R z6$|Zwh0`l0%+`C}eUsm+d}XW&#;u%FyTLAueogBA9%N1iU*X4y>9}eZ+3>ytUR`P%84=!e|A}^gC zUU-)W(=5h}j0@;kFFs%4fSH@Pg(~Jvl!En+0)4Nd-0dakRbkQFSMu8f&0HS}l0O;8Z%(l}ol7!2Qiu%~WHx z`{SHB==^?yz3Wjmt(0T!TlVWxGUmQDb$WO!?BgB24b>xrS3Jv=hoR=TmScugJCl<(ue({&S)MVNFW`=I)+y_{x_b)j< zSTFpnWqTnv_NnZ%Csbg}YVKzVsc##y8>DtZOY~qd=d((iMVwBI_d5wU^q2Pba+zNw zqmI-^Fvzd+Tn^xj8B*SduXlFZ9g_8lX(;4jb{TEwyAVk;<%BOeba9+DZ+(-=5<9nJ zi6aO;@8cMC2Ij{Ijt|H%*7qulF-o=9lPOt`gluTfGd47hd(|$I&m>1XjB=rq|CZCH(s415KZR(>pnE9|&CY5&o|ctErKCy7MD zhyx2sC6*d*VKgL~qjI)=WH}w?l5j`UZ$~Z%Qa3D^49v-JR?s5u1RkCG!a)~O;KSX} zlPXa_S9O#si 
znz?gEg{P#agaD%QlB05W-G-?u4YcHhF*nGq;SJ(E&71RC?XKKUsCH>m<~s{?mr_~^ zi#TT$-5pnsiIgy;TW$MZ#K*93#kWs<72b3rV%61H{&>SG=iU>Uoc@x)g>EI%=RF`Z+)y;yD{2qy?KkWH9eH{d0E+t#! z|N7F21H;()nZE6t$Uq+DdsRE`$rdpm))P51%}wOkex={{UM{S)R{MWt+;_}s*qPjK zSA^Q&EDJHZ4Jif07MUTsL)is=d zpOaqiT-R*Dd}3C9^J)F-{re4FjbR0b5VpNeKK`7kfF5J|{Mpn=^(=FGycL&ldRMvK z4}Eya$xd~4JBdJafb-iMDDl zAwwqk!zmxf#vki)$fTzH=(wrhFtK%X;)YbF(mmgk?%3q-WBAT)Q4EgSJhd(ZN9DPt ziJ&s+@QuEq3J0af=IaGNjftd1S!+Hvp6G0VUl)7ARUBRB~Pdi_@9D*+Q?AJ)UYsgzFf%bj};9 zm*a9WE14t1QzFB%^{0?YPJY(~HFE!)S>6a6TiE5xca=FUy?6iWngBJGul6>y8o0_& zjWMX#*NB|k{kF9lX*Rc0PHQ=O`Vd(M2kOC{>ZeCk<#pG1GP&>%J&6$&cz>{=M+ByX zMdQWqec{u`-HJD(MFzxdMmHK?;V~gv`ja2Bzn0_Nv?0u(k{+*N+o~H|f1uY_@>j^@ z*@fR$)D`7*d6B(Pc=ou^lbW%(H_ETuM&6K$LLNlz2zNzez61;B6RH+G>BH3 zX{`3_i-i|YDlH0dXHoavS_gRa9wgtVVZ+vI8e`QP+;_;ZaX2+i~>I`ZZxDA z%@5m0+VDob4);Ow!~4|xE#EisFt(DnDp}UMY*aM~FV;E)_mXPe#Y2CosHlWrrXdE= zEZ=_QW4j4HFKz#b)oPf)*Cpkbb83Tss;Yk9QbE{0I1l()`D<8ZDSs%i$8aq(_{DC? z7b%dS7yZDZ{}eWRu`WmSw}=IqQzL!?YWL}49j`FHrQc@y_*3|n%79}a*2|swbui8? z-9FyNe6cO|fvS9k6FkjZmaX??`_`v{iWF>Ffm`vUUn}Q;%b?pfjchJPx&VRf>TeGB zm#?*r5GaC)YgR&7*3VhS#tZmz&veBZl4(y#tF3h|aJF24e`*!TIMMJ1Vo%!oy|h5B zRwO1>nSt!zX3B%uEk-D_4)pUzk&oROj$eV~bFRiIYYU*@cL~AErD|U(-&8cbvLY*z zv}$DK`;k_`J|Wplx3Q~!DS37iblzr~n#@;lYb4>^`qUC5a2-UHMccn5p9 z`t24<;^v*}_N9b-&56%{yi(RK7(YKdv8%aVK9fixm%UEtrHP+XkY;Bdk{hT{xTV>5 zW!06M#C~kLA|3w9C`7{_IrhqvVzj;Ez7&t-e%c|?a8br8Az6@wMfvSmPU1UL=G!;m ztc>5hUiKICY!s8mA+^>q@9!YJ&v_Xh)#dBEaWIAcQeKQ@u=mQT1v4jurEjp?OJm`N zC|RlOPyBPYEU|5`8~N#8zsu-E=N*8(fhlN!6yEI7$vnbh9#6E=z;Rn_6m8p?Arwy> zn@>x9Ak7&KK-%}f_I@gTSbIoRdCTj^QD+Sen9N(<+ zVB9ob{l}Vj18g^8*k<|OQ8N?cYV=Gf_Yy3#WL9xa3uN)wCqqN|*-sUQu!Mg{4!>P$ zHGHsWv%4nRCc29`D15iuQbaqNJo^6hX!E-hct`u|yb?=6T_5dP zb3y0>%`m^0hz&~r$YpekPgtf6$d=h@6@&0@)ESAogC+@YI7ZsW1guZ{9e2IZN<5vZ z7X6%+NnJgjH(=P%X1kQ~r_kd3ENar5&s@ONltuJh8&z$+VBvyvv)*YbY}g4-^uG9h zj<==5+>d^8N8=hD001%h|J~V+YyamvxRhofpySVV!nWdNi#`9Nze^*1Gd;8}B>sN@ D8Mh`c From 28edfd4f0f72434c45ee80d035003af0d307f58c Mon Sep 17 00:00:00 2001 From: "pyup.io bot" Date: Tue, 1 Oct 2024 02:26:32 -0700 Subject: [PATCH 0858/1051] Update pymongo from 4.9.1 to 4.10.0 (#9330) --- requirements/extras/mongodb.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/mongodb.txt b/requirements/extras/mongodb.txt index ecf3c6f8156..7c595f2ab2b 100644 --- a/requirements/extras/mongodb.txt +++ b/requirements/extras/mongodb.txt @@ -1 +1 @@ -pymongo==4.9.1 +pymongo==4.10.0 From 97154ebd30edc0c46fc1bec463084cab37e2a33e Mon Sep 17 00:00:00 2001 From: "pyup.io bot" Date: Tue, 1 Oct 2024 19:19:24 -0700 Subject: [PATCH 0859/1051] Update pymongo from 4.10.0 to 4.10.1 (#9332) --- requirements/extras/mongodb.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/mongodb.txt b/requirements/extras/mongodb.txt index 7c595f2ab2b..393740b77b3 100644 --- a/requirements/extras/mongodb.txt +++ b/requirements/extras/mongodb.txt @@ -1 +1 @@ -pymongo==4.10.0 +pymongo==4.10.1 From 118e004856e231f3c40f9badd75155d0dc0de38e Mon Sep 17 00:00:00 2001 From: Bruno Alla Date: Wed, 2 Oct 2024 09:33:18 -0400 Subject: [PATCH 0860/1051] Update user guide to recommend delay_on_commit (#9333) --- docs/userguide/tasks.rst | 30 +++++++++++++++++++++++++++--- 1 file changed, 27 insertions(+), 3 deletions(-) diff --git a/docs/userguide/tasks.rst b/docs/userguide/tasks.rst index 0bbfe4c56b3..505522b3cf5 100644 --- a/docs/userguide/tasks.rst +++ b/docs/userguide/tasks.rst @@ -2043,8 +2043,8 @@ There's a race condition if the task starts executing before the transaction has 
been committed; The database object doesn't exist yet! -The solution is to use the ``on_commit`` callback to launch your Celery task -once all transactions have been committed successfully. +The solution is to use +:meth:`~celery.contrib.django.task.DjangoTask.delay_on_commit` instead: .. code-block:: python @@ -2054,7 +2054,31 @@ once all transactions have been committed successfully. @transaction.atomic def create_article(request): article = Article.objects.create() - transaction.on_commit(lambda: expand_abbreviations.delay(article.pk)) + expand_abbreviations.delay_on_commit(article.pk) + return HttpResponseRedirect('/articles/') + +This method was added in Celery 5.4. It's a shortcut that uses Django's +``on_commit`` callback to launch your Celery task once all transactions +have been committed successfully. + +With Celery <5.4 +~~~~~~~~~~~~~~~~ + +If you're using an older version of Celery, you can replicate this behaviour +using the Django callback directly as follows: + +.. code-block:: python + + import functools + from django.db import transaction + from django.http import HttpResponseRedirect + + @transaction.atomic + def create_article(request): + article = Article.objects.create() + transaction.on_commit( + functools.partial(expand_abbreviations.delay, article.pk) + ) return HttpResponseRedirect('/articles/') .. note:: From 2423b7426e5ba10801fc55b89c649f4e7d76b927 Mon Sep 17 00:00:00 2001 From: "pyup.io bot" Date: Sun, 6 Oct 2024 10:51:37 -0700 Subject: [PATCH 0861/1051] Pin pre-commit to latest version 4.0.0 (Python 3.9+) (#9334) * Pin pre-commit to latest version 4.0.0 * Update requirements/test.txt * Update requirements/test.txt * Update requirements/test.txt * Update requirements/test.txt --------- Co-authored-by: Tomer Nosrati --- requirements/test.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/test.txt b/requirements/test.txt index cba628a0045..1389fc0f84e 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -9,8 +9,8 @@ boto3>=1.26.143 moto>=4.1.11,<5.1.0 # typing extensions mypy==1.11.2; platform_python_implementation=="CPython" -pre-commit>=3.5.0,<3.6.0; python_version < '3.9' -pre-commit>=3.8.0; python_version >= '3.9' +pre-commit>=3.5.0,<3.8.0; python_version < '3.9' +pre-commit>=4.0.0; python_version >= '3.9' -r extras/yaml.txt -r extras/msgpack.txt -r extras/mongodb.txt From e0ca02b76bcbe8e0353d9c14aae7c40cee0a3311 Mon Sep 17 00:00:00 2001 From: pyup-bot Date: Sun, 6 Oct 2024 22:38:36 +0300 Subject: [PATCH 0862/1051] Update ephem from 4.1.5 to 4.1.6 --- requirements/extras/solar.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/solar.txt b/requirements/extras/solar.txt index 318354cc7ed..f0d13a35bb3 100644 --- a/requirements/extras/solar.txt +++ b/requirements/extras/solar.txt @@ -1 +1 @@ -ephem==4.1.5; platform_python_implementation!="PyPy" +ephem==4.1.6; platform_python_implementation!="PyPy" From 78e14c6b5d4d09c81a6cc878921c857bf0d49f41 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 7 Oct 2024 15:06:16 +0300 Subject: [PATCH 0863/1051] Updated Blacksmith SVG logo (#9337) --- README.rst | 2 +- ...th-logo-padded.svg => blacksmith-logo-white-on-black.svg} | 5 +++-- 2 files changed, 4 insertions(+), 3 deletions(-) rename docs/images/{blacksmith-logo-padded.svg => blacksmith-logo-white-on-black.svg} (97%) diff --git a/README.rst b/README.rst index 126d922a0be..ef7c2c0c9a5 100644 --- a/README.rst +++ b/README.rst @@ -499,7 +499,7 @@ link to your website. 
[`Become a sponsor`_] .. |oc-sponsor-1| image:: https://opencollective.com/celery/sponsor/0/avatar.svg :target: https://opencollective.com/celery/sponsor/0/website -.. |oc-sponsor-2| image:: ./docs/images/blacksmith-logo-padded.svg +.. |oc-sponsor-2| image:: ./docs/images/blacksmith-logo-white-on-black.svg :target: https://www.blacksmith.sh/ :alt: Blacksmith.sh :width: 240 diff --git a/docs/images/blacksmith-logo-padded.svg b/docs/images/blacksmith-logo-white-on-black.svg similarity index 97% rename from docs/images/blacksmith-logo-padded.svg rename to docs/images/blacksmith-logo-white-on-black.svg index 849fe48fdc9..3f8da98f3ae 100644 --- a/docs/images/blacksmith-logo-padded.svg +++ b/docs/images/blacksmith-logo-white-on-black.svg @@ -1,5 +1,6 @@ - + + @@ -9,7 +10,7 @@ - + From bf6a6f09d78a1ec58b172083d0db16ed5fd3eb33 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 7 Oct 2024 20:52:01 +0300 Subject: [PATCH 0864/1051] [pre-commit.ci] pre-commit autoupdate (#9338) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/pre-commit-hooks: v4.6.0 → v5.0.0](https://github.com/pre-commit/pre-commit-hooks/compare/v4.6.0...v5.0.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f9c7f99be07..2f994896bc8 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -25,7 +25,7 @@ repos: - tomli - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.6.0 + rev: v5.0.0 hooks: - id: check-merge-conflict - id: check-toml From b3cd4988467b14c61d42eaae691ad2ab04923eff Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 8 Oct 2024 15:42:08 +0300 Subject: [PATCH 0865/1051] Prepare for (pre) release: v5.5.0rc1 (#9341) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Added Changelog for v5.5.0rc1 * Bump version: 5.5.0b4 → 5.5.0rc1 --- .bumpversion.cfg | 2 +- Changelog.rst | 112 +++++++++++++++++++++++++++++++++ README.rst | 2 +- celery/__init__.py | 2 +- docs/history/changelog-5.5.rst | 112 +++++++++++++++++++++++++++++++++ docs/includes/introduction.txt | 2 +- 6 files changed, 228 insertions(+), 4 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 97286770eb0..c037934602a 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 5.5.0b4 +current_version = 5.5.0rc1 commit = True tag = True parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P[a-z\d]+)? diff --git a/Changelog.rst b/Changelog.rst index 7d8d9769175..e74f9b62b2f 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -8,6 +8,118 @@ This document contains change notes for bugfix & new features in the main branch & 5.5.x series, please see :ref:`whatsnew-5.5` for an overview of what's new in Celery 5.5. +.. _version-5.5.0rc1: + +5.5.0rc1 +======== + +:release-date: 2024-10-08 +:release-by: Tomer Nosrati + +Celery v5.5.0 Release Candidate 1 is now available for testing. +Please help us test this version and report any issues. + +Key Highlights +~~~~~~~~~~~~~~ + +See :ref:`whatsnew-5.5` or read the main highlights below. + +Python 3.13 Initial Support +--------------------------- + +This release introduces the initial support for Python 3.13 with Celery. 
+
+After upgrading to this version, please share your feedback on the Python 3.13 support.
+
+Soft Shutdown
+-------------
+
+The soft shutdown is a new mechanism in Celery that sits between the warm shutdown and the cold shutdown.
+It sets a time limited "warm shutdown" period, during which the worker will continue to process tasks that are already running.
+After the soft shutdown ends, the worker will initiate a graceful cold shutdown, stopping all tasks and exiting.
+
+The soft shutdown is disabled by default, and can be enabled by setting the new configuration option :setting:`worker_soft_shutdown_timeout`.
+If a worker is not running any task when the soft shutdown initiates, it will skip the warm shutdown period and proceed directly to the cold shutdown
+unless the new configuration option :setting:`worker_enable_soft_shutdown_on_idle` is set to True. This is useful for idle workers
+that are only waiting on ETA tasks to be executed, but still want to enable the soft shutdown anyway.
+
+The soft shutdown can replace the cold shutdown when using a broker with a visibility timeout mechanism, like :ref:`Redis `
+or :ref:`SQS `, to enable a more graceful cold shutdown procedure, allowing the worker enough time to re-queue tasks that were not
+completed (e.g., ``Restoring 1 unacknowledged message(s)``) by resetting the visibility timeout of the unacknowledged messages just before
+the worker exits completely.
+
+After upgrading to this version, please share your feedback on the new Soft Shutdown mechanism.
+
+Relevant Issues:
+`#9213 `_,
+`#9231 `_,
+`#9238 `_
+
+- New :ref:`documentation ` for each shutdown type.
+- New :setting:`worker_soft_shutdown_timeout` configuration option.
+- New :setting:`worker_enable_soft_shutdown_on_idle` configuration option.
+
+REMAP_SIGTERM
+-------------
+
+The ``REMAP_SIGTERM`` "hidden feature" has been tested, :ref:`documented ` and is now officially supported.
+This feature allows users to remap the SIGTERM signal to SIGQUIT, to initiate a soft or a cold shutdown using :sig:`TERM`
+instead of :sig:`QUIT`.
+
+Pydantic Support
+----------------
+
+This release introduces support for Pydantic models in Celery tasks.
+For more info, see the new pydantic example and PR `#9023 `_ by @mathiasertl.
+
+After upgrading to this version, please share your feedback on the new Pydantic support.
+
+Redis Broker Stability Improvements
+-----------------------------------
+The root cause of the Redis broker instability issue has been `identified and resolved `_
+in the v5.4.0 release of Kombu, which should resolve the disconnections bug and offer additional improvements.
+
+After upgrading to this version, please share your feedback on the Redis broker stability.
+
+Relevant Issues:
+`#7276 `_,
+`#8091 `_,
+`#8030 `_,
+`#8384 `_
+
+Quorum Queues Initial Support
+-----------------------------
+This release introduces the initial support for Quorum Queues with Celery.
+
+See new configuration options for more details:
+
+- :setting:`task_default_queue_type`
+- :setting:`worker_detect_quorum_queues`
+
+After upgrading to this version, please share your feedback on the Quorum Queues support.
+
+Relevant Issues:
+`#6067 `_,
+`#9121 `_
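+
+As a quick orientation, the new options introduced in this release can be enabled
+together like this (a minimal sketch; ``app`` is a regular Celery application, and
+the values shown are illustrative, not recommendations):
+
+.. code-block:: python
+
+    app.conf.worker_soft_shutdown_timeout = 10.0  # seconds of "soft" grace period
+    app.conf.worker_enable_soft_shutdown_on_idle = True
+    app.conf.task_default_queue_type = 'quorum'   # requires RabbitMQ quorum queues
+    app.conf.worker_detect_quorum_queues = True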
+
+What's Changed
+~~~~~~~~~~~~~~
+
+- Added Blacksmith.sh to the Sponsors section in the README (#9323)
+- Revert "Added Blacksmith.sh to the Sponsors section in the README" (#9324)
+- Added Blacksmith.sh to the Sponsors section in the README (#9325)
+- Added missing " |oc-sponsor-3|” in README (#9326)
+- Use Blacksmith SVG logo (#9327)
+- Updated Blacksmith SVG logo (#9328)
+- Revert "Updated Blacksmith SVG logo" (#9329)
+- Update pymongo to 4.10.0 (#9330)
+- Update pymongo to 4.10.1 (#9332)
+- Update user guide to recommend delay_on_commit (#9333)
+- Pin pre-commit to latest version 4.0.0 (Python 3.9+) (#9334)
+- Update ephem to 4.1.6 (#9336)
+- Updated Blacksmith SVG logo (#9337)
+- Prepare for (pre) release: v5.5.0rc1 (#9341)
+
 .. _version-5.5.0b4:
 
 5.5.0b4
diff --git a/README.rst b/README.rst
index ef7c2c0c9a5..b01ed8f0b0c 100644
--- a/README.rst
+++ b/README.rst
@@ -2,7 +2,7 @@
 |build-status| |coverage| |license| |wheel| |semgrep| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge|
 
-:Version: 5.5.0b4 (immunity)
+:Version: 5.5.0rc1 (immunity)
 :Web: https://docs.celeryq.dev/en/stable/index.html
 :Download: https://pypi.org/project/celery/
 :Source: https://github.com/celery/celery/
diff --git a/celery/__init__.py b/celery/__init__.py
index 73587e59612..1cfecdd6eab 100644
--- a/celery/__init__.py
+++ b/celery/__init__.py
@@ -17,7 +17,7 @@
 
 SERIES = 'immunity'
 
-__version__ = '5.5.0b4'
+__version__ = '5.5.0rc1'
 __author__ = 'Ask Solem'
 __contact__ = 'auvipy@gmail.com'
 __homepage__ = 'https://docs.celeryq.dev/'
diff --git a/docs/history/changelog-5.5.rst b/docs/history/changelog-5.5.rst
index 49acb1235de..1fa6db1aadd 100644
--- a/docs/history/changelog-5.5.rst
+++ b/docs/history/changelog-5.5.rst
@@ -8,6 +8,118 @@ This document contains change notes for bugfix & new features in the main
 branch & 5.5.x series, please see :ref:`whatsnew-5.5` for
 an overview of what's new in Celery 5.5.
 
+.. _version-5.5.0rc1:
+
+5.5.0rc1
+========
+
+:release-date: 2024-10-08
+:release-by: Tomer Nosrati
+
+Celery v5.5.0 Release Candidate 1 is now available for testing.
+Please help us test this version and report any issues.
+
+Key Highlights
+~~~~~~~~~~~~~~
+
+See :ref:`whatsnew-5.5` or read the main highlights below.
+
+Python 3.13 Initial Support
+---------------------------
+
+This release introduces the initial support for Python 3.13 with Celery.
+
+After upgrading to this version, please share your feedback on the Python 3.13 support.
+
+Soft Shutdown
+-------------
+
+The soft shutdown is a new mechanism in Celery that sits between the warm shutdown and the cold shutdown.
+It sets a time limited "warm shutdown" period, during which the worker will continue to process tasks that are already running.
+After the soft shutdown ends, the worker will initiate a graceful cold shutdown, stopping all tasks and exiting.
+
+The soft shutdown is disabled by default, and can be enabled by setting the new configuration option :setting:`worker_soft_shutdown_timeout`.
+If a worker is not running any task when the soft shutdown initiates, it will skip the warm shutdown period and proceed directly to the cold shutdown
+unless the new configuration option :setting:`worker_enable_soft_shutdown_on_idle` is set to True. This is useful for idle workers
+that are only waiting on ETA tasks to be executed, but still want to enable the soft shutdown anyway.
+
+The soft shutdown can replace the cold shutdown when using a broker with a visibility timeout mechanism, like :ref:`Redis `
+or :ref:`SQS `, to enable a more graceful cold shutdown procedure, allowing the worker enough time to re-queue tasks that were not
+completed (e.g., ``Restoring 1 unacknowledged message(s)``) by resetting the visibility timeout of the unacknowledged messages just before
+the worker exits completely.
+
+After upgrading to this version, please share your feedback on the new Soft Shutdown mechanism.
+
+Relevant Issues:
+`#9213 `_,
+`#9231 `_,
+`#9238 `_
+
+- New :ref:`documentation ` for each shutdown type.
+- New :setting:`worker_soft_shutdown_timeout` configuration option.
+- New :setting:`worker_enable_soft_shutdown_on_idle` configuration option.
+
+REMAP_SIGTERM
+-------------
+
+The ``REMAP_SIGTERM`` "hidden feature" has been tested, :ref:`documented `, and is now officially supported.
+This feature allows users to remap the SIGTERM signal to SIGQUIT, to initiate a soft or a cold shutdown using :sig:`TERM`
+instead of :sig:`QUIT`.
+
+Pydantic Support
+----------------
+
+This release introduces support for Pydantic models in Celery tasks.
+For more information, see the new pydantic example and PR `#9023 `_ by @mathiasertl.
+
+After upgrading to this version, please share your feedback on the new Pydantic support.
+
+Redis Broker Stability Improvements
+-----------------------------------
+
+The root cause of the Redis broker instability issue has been `identified and resolved `_
+in the v5.4.0 release of Kombu, which should resolve the disconnections bug and offer additional improvements.
+
+After upgrading to this version, please share your feedback on the Redis broker stability.
+
+Relevant Issues:
+`#7276 `_,
+`#8091 `_,
+`#8030 `_,
+`#8384 `_
+
+Quorum Queues Initial Support
+-----------------------------
+
+This release introduces the initial support for Quorum Queues with Celery.
+
+See new configuration options for more details:
+
+- :setting:`task_default_queue_type`
+- :setting:`worker_detect_quorum_queues`
+
+After upgrading to this version, please share your feedback on the Quorum Queues support.
+
+Relevant Issues:
+`#6067 `_,
+`#9121 `_
+
+What's Changed
+~~~~~~~~~~~~~~
+
+- Added Blacksmith.sh to the Sponsors section in the README (#9323)
+- Revert "Added Blacksmith.sh to the Sponsors section in the README" (#9324)
+- Added Blacksmith.sh to the Sponsors section in the README (#9325)
+- Added missing " |oc-sponsor-3|" in README (#9326)
+- Use Blacksmith SVG logo (#9327)
+- Updated Blacksmith SVG logo (#9328)
+- Revert "Updated Blacksmith SVG logo" (#9329)
+- Update pymongo to 4.10.0 (#9330)
+- Update pymongo to 4.10.1 (#9332)
+- Update user guide to recommend delay_on_commit (#9333)
+- Pin pre-commit to latest version 4.0.0 (Python 3.9+) (#9334)
+- Update ephem to 4.1.6 (#9336)
+- Updated Blacksmith SVG logo (#9337)
+- Prepare for (pre) release: v5.5.0rc1 (#9341)
+
 ..
_version-5.5.0b4: 5.5.0b4 diff --git a/docs/includes/introduction.txt b/docs/includes/introduction.txt index 48013e2c369..a51a36756de 100644 --- a/docs/includes/introduction.txt +++ b/docs/includes/introduction.txt @@ -1,4 +1,4 @@ -:Version: 5.5.0b4 (immunity) +:Version: 5.5.0rc1 (immunity) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ From d8c57584efdeacbcf76d380eec5113357cd5b5a1 Mon Sep 17 00:00:00 2001 From: Trenton H <797416+stumpylog@users.noreply.github.com> Date: Tue, 8 Oct 2024 11:53:41 -0700 Subject: [PATCH 0866/1051] Fix: Treat dbm.error as a corrupted schedule file (#9331) * Additionally catch and handle dbm.error as an error for a corrupted schedule file * adds specific test for catching dbm.error --- celery/beat.py | 3 ++- t/unit/app/test_beat.py | 20 ++++++++++++++++++++ 2 files changed, 22 insertions(+), 1 deletion(-) diff --git a/celery/beat.py b/celery/beat.py index 9656493ecbe..86ad837f0d5 100644 --- a/celery/beat.py +++ b/celery/beat.py @@ -1,6 +1,7 @@ """The periodic task scheduler.""" import copy +import dbm import errno import heapq import os @@ -572,7 +573,7 @@ def _create_schedule(self): # new schedule db try: self._store['entries'] = {} - except (KeyError, UnicodeDecodeError, TypeError) as exc: + except (KeyError, UnicodeDecodeError, TypeError) + dbm.error as exc: self._store = self._destroy_open_corrupted_schedule(exc) continue else: diff --git a/t/unit/app/test_beat.py b/t/unit/app/test_beat.py index a95e8e41409..b81a11426e1 100644 --- a/t/unit/app/test_beat.py +++ b/t/unit/app/test_beat.py @@ -1,3 +1,4 @@ +import dbm import errno import sys from datetime import datetime, timedelta, timezone @@ -688,6 +689,25 @@ def test_create_schedule_corrupted(self): s._create_schedule() s._destroy_open_corrupted_schedule.assert_called_with(expected_error) + def test_create_schedule_corrupted_dbm_error(self): + """ + Test that any dbm.error that might happen when opening beat-schedule.db are caught + """ + s = create_persistent_scheduler()[0](app=self.app, + schedule_filename='schedule') + s._store = MagicMock() + s._destroy_open_corrupted_schedule = Mock() + s._destroy_open_corrupted_schedule.return_value = MagicMock() + + # self._store['entries'] = {} will throw a KeyError + s._store.__getitem__.side_effect = KeyError() + # then, when _create_schedule tries to reset _store['entries'], throw another error, specifically dbm.error + expected_error = dbm.error[0]() + s._store.__setitem__.side_effect = expected_error + + s._create_schedule() + s._destroy_open_corrupted_schedule.assert_called_with(expected_error) + def test_create_schedule_missing_entries(self): """ Test that if _create_schedule can't find the key "entries" in _store it will recreate it From 47552a7555ea513711ad11d42028dd2d1addd8ae Mon Sep 17 00:00:00 2001 From: "pyup.io bot" Date: Tue, 8 Oct 2024 12:05:02 -0700 Subject: [PATCH 0867/1051] Pin pre-commit to latest version 4.0.1 (#9343) * Pin pre-commit to latest version 4.0.1 * Apply suggestions from code review --------- Co-authored-by: Tomer Nosrati --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index 1389fc0f84e..d515aa62e23 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -10,7 +10,7 @@ moto>=4.1.11,<5.1.0 # typing extensions mypy==1.11.2; platform_python_implementation=="CPython" pre-commit>=3.5.0,<3.8.0; python_version < '3.9' -pre-commit>=4.0.0; python_version 
>= '3.9' +pre-commit>=4.0.1; python_version >= '3.9' -r extras/yaml.txt -r extras/msgpack.txt -r extras/mongodb.txt From 14a7564e73a838876680119e29fca604fb3e2658 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Fri, 11 Oct 2024 22:01:19 +0300 Subject: [PATCH 0868/1051] Added Python 3.13 to Dockerfiles (#9350) * Added Python 3.13 to Dockerfiles * Updated GitHub workflow to run when the docker files are changed --- .github/workflows/docker.yml | 5 ++++- docker/Dockerfile | 20 ++++++++++++++++---- t/smoke/workers/docker/dev | 2 +- 3 files changed, 21 insertions(+), 6 deletions(-) diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index ba9d6c6ae41..4587775abaf 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -9,7 +9,8 @@ on: - '**.toml' - '/docker/**' - '.github/workflows/docker.yml' - - 'Dockerfile' + - 'docker/Dockerfile' + - 't/smoke/workers/docker/**' push: branches: [ 'main'] paths: @@ -18,6 +19,8 @@ on: - '**.toml' - '/docker/**' - '.github/workflows/docker.yml' + - 'docker/Dockerfile' + - 't/smoke/workers/docker/**' workflow_dispatch: diff --git a/docker/Dockerfile b/docker/Dockerfile index 35b947cc483..e40faa71f56 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -65,6 +65,7 @@ USER $CELERY_USER RUN curl https://pyenv.run | bash # Install required Python versions +RUN pyenv install 3.13 RUN pyenv install 3.12 RUN pyenv install 3.11 RUN pyenv install 3.10 @@ -83,9 +84,10 @@ COPY --chown=1000:1000 docker/entrypoint /entrypoint RUN chmod gu+x /entrypoint # Define the local pyenvs -RUN pyenv local 3.12 3.11 3.10 3.9 3.8 +RUN pyenv local 3.13 3.12 3.11 3.10 3.9 3.8 -RUN pyenv exec python3.12 -m pip install --upgrade pip setuptools wheel && \ +RUN pyenv exec python3.13 -m pip install --upgrade pip setuptools wheel && \ + pyenv exec python3.12 -m pip install --upgrade pip setuptools wheel && \ pyenv exec python3.11 -m pip install --upgrade pip setuptools wheel && \ pyenv exec python3.10 -m pip install --upgrade pip setuptools wheel && \ pyenv exec python3.9 -m pip install --upgrade pip setuptools wheel && \ @@ -93,14 +95,24 @@ RUN pyenv exec python3.12 -m pip install --upgrade pip setuptools wheel && \ COPY --chown=1000:1000 . 
$HOME/celery -RUN pyenv exec python3.12 -m pip install -e $HOME/celery && \ +RUN pyenv exec python3.13 -m pip install -e $HOME/celery && \ + pyenv exec python3.12 -m pip install -e $HOME/celery && \ pyenv exec python3.11 -m pip install -e $HOME/celery && \ pyenv exec python3.10 -m pip install -e $HOME/celery && \ pyenv exec python3.9 -m pip install -e $HOME/celery && \ pyenv exec python3.8 -m pip install -e $HOME/celery # Setup one celery environment for basic development use -RUN pyenv exec python3.12 -m pip install -r requirements/default.txt \ +RUN pyenv exec python3.13 -m pip install -r requirements/default.txt \ + -r requirements/dev.txt \ + -r requirements/docs.txt \ + -r requirements/pkgutils.txt \ + -r requirements/test-ci-base.txt \ + -r requirements/test-ci-default.txt \ + -r requirements/test-integration.txt \ + -r requirements/test-pypy3.txt \ + -r requirements/test.txt && \ + pyenv exec python3.12 -m pip install -r requirements/default.txt \ -r requirements/dev.txt \ -r requirements/docs.txt \ -r requirements/pkgutils.txt \ diff --git a/t/smoke/workers/docker/dev b/t/smoke/workers/docker/dev index 47f3704510d..015be6deebb 100644 --- a/t/smoke/workers/docker/dev +++ b/t/smoke/workers/docker/dev @@ -1,4 +1,4 @@ -FROM python:3.11-bookworm +FROM python:3.13-bookworm # Create a user to run the worker RUN adduser --disabled-password --gecos "" test_user From abf06c743ada8b72b5f4f7ad91d2e2d82da183a0 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sun, 13 Oct 2024 20:41:22 +0300 Subject: [PATCH 0869/1051] Skip test_pool_restart_import_modules on PyPy due to test issue (#9352) --- t/unit/worker/test_control.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/t/unit/worker/test_control.py b/t/unit/worker/test_control.py index df1c8c4c04b..877bc82c4b6 100644 --- a/t/unit/worker/test_control.py +++ b/t/unit/worker/test_control.py @@ -22,6 +22,8 @@ hostname = socket.gethostname() +IS_PYPY = hasattr(sys, 'pypy_version_info') + class WorkController: autoscaler = None @@ -721,6 +723,7 @@ def test_pool_restart(self): consumer.controller.consumer = None panel.handle('pool_restart', {'reloader': _reload}) + @pytest.mark.skipif(IS_PYPY, reason="Patch for sys.modules doesn't work on PyPy correctly") @patch('celery.worker.worker.logger.debug') def test_pool_restart_import_modules(self, _debug): consumer = Consumer(self.app) From f9f4a9e4fb488bae3f821dd56425ca945fdc1838 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 13 Oct 2024 22:00:48 +0300 Subject: [PATCH 0870/1051] Update elastic-transport requirement from <=8.15.0 to <=8.15.1 (#9347) Updates the requirements on [elastic-transport](https://github.com/elastic/elastic-transport-python) to permit the latest version. - [Release notes](https://github.com/elastic/elastic-transport-python/releases) - [Changelog](https://github.com/elastic/elastic-transport-python/blob/main/CHANGELOG.md) - [Commits](https://github.com/elastic/elastic-transport-python/compare/0.1.0b0...v8.15.1) --- updated-dependencies: - dependency-name: elastic-transport dependency-type: direct:production ... 
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 requirements/extras/elasticsearch.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements/extras/elasticsearch.txt b/requirements/extras/elasticsearch.txt
index 4a02b7374b7..024d7624268 100644
--- a/requirements/extras/elasticsearch.txt
+++ b/requirements/extras/elasticsearch.txt
@@ -1,2 +1,2 @@
 elasticsearch<=8.15.1
-elastic-transport<=8.15.0
+elastic-transport<=8.15.1

From 0b90cd86fded69375e282361b8389f5793bb868d Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Mon, 14 Oct 2024 18:04:20 +0600
Subject: [PATCH 0871/1051] added dragonfly logo (#9353)

---
 docs/images/dragonfly.png | Bin 0 -> 14258 bytes
 1 file changed, 0 insertions(+), 0 deletions(-)
 create mode 100644 docs/images/dragonfly.png

diff --git a/docs/images/dragonfly.png b/docs/images/dragonfly.png
new file mode 100644
index 0000000000000000000000000000000000000000..54b9c4dd0a2a88dc56d4275abddac20c53327b81
GIT binary patch
[binary image literal for docs/images/dragonfly.png (14258 bytes) omitted]

Date: Mon, 14 Oct 2024 18:10:35 +0600
Subject: [PATCH 0872/1051] Update README.rst (#9354)

* Update README.rst

* Update README.rst

---
 README.rst | 11 +++++++++++
 1 file changed, 11 insertions(+)

diff --git a/README.rst b/README.rst
index b01ed8f0b0c..c7bb5fd4200 100644
--- a/README.rst
+++ b/README.rst
@@ -26,6 +26,17 @@ Available as part of the Tidelift Subscription.

 The maintainers of ``celery`` and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source dependencies you use to build your applications. Save time, reduce risk, and improve code health, while paying the maintainers of the exact dependencies you use. `Learn more. `_

+
+Sponsor
+=======
+
+`Dragonfly `_ is a drop-in Redis replacement that cuts costs and boosts performance.
Designed to fully utilize the power of modern cloud hardware and deliver on the data demands of modern applications, Dragonfly frees developers from the limits of traditional in-memory data stores. + .. image:: https://github.com/celery/celery/raw/main/docs/images/dragonfly.png :alt: Dragonfly logo + :width: 150px + + From a38b52b26a19a40a6a34fbcdbdbcf65426e87e1c Mon Sep 17 00:00:00 2001 From: "pyup.io bot" Date: Mon, 14 Oct 2024 05:55:38 -0700 Subject: [PATCH 0874/1051] Update mypy from 1.11.2 to 1.12.0 (#9356) --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index d515aa62e23..a4031d1168b 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -8,7 +8,7 @@ pytest-order==1.3.0 boto3>=1.26.143 moto>=4.1.11,<5.1.0 # typing extensions -mypy==1.11.2; platform_python_implementation=="CPython" +mypy==1.12.0; platform_python_implementation=="CPython" pre-commit>=3.5.0,<3.8.0; python_version < '3.9' pre-commit>=4.0.1; python_version >= '3.9' -r extras/yaml.txt From 5830f5b8b9dda53e80449c818249e17d9e691782 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 14 Oct 2024 18:21:48 +0300 Subject: [PATCH 0875/1051] Bump Kombu to v5.5.0rc1 (#9357) --- requirements/default.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/default.txt b/requirements/default.txt index 3711888032d..0e640526579 100644 --- a/requirements/default.txt +++ b/requirements/default.txt @@ -1,5 +1,5 @@ billiard>=4.2.1,<5.0 -kombu>=5.4.2,<6.0 +kombu>=5.5.0rc1,<6.0 vine>=5.1.0,<6.0 click>=8.1.2,<9.0 click-didyoumean>=0.3.0 From 03e3359320c9f72507a32a51ab61605db503da98 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 14 Oct 2024 20:23:05 +0300 Subject: [PATCH 0876/1051] [pre-commit.ci] pre-commit autoupdate (#9358) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/asottile/pyupgrade: v3.17.0 → v3.18.0](https://github.com/asottile/pyupgrade/compare/v3.17.0...v3.18.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 2f994896bc8..f5b61ccd17c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/asottile/pyupgrade - rev: v3.17.0 + rev: v3.18.0 hooks: - id: pyupgrade args: ["--py38-plus"] From a353854f50b714f0ca326b60f8c96e42665b6045 Mon Sep 17 00:00:00 2001 From: Helio Machado <0x2b3bfa0+git@googlemail.com> Date: Wed, 16 Oct 2024 13:35:02 +0200 Subject: [PATCH 0877/1051] Fix `celery --loader` option parsing (#9361) * Fix `celery --option` parsing * Remove unused import --- celery/bin/celery.py | 51 +++++++++++++++++--------------------------- 1 file changed, 20 insertions(+), 31 deletions(-) diff --git a/celery/bin/celery.py b/celery/bin/celery.py index 4aeed42597f..da1fff5be24 100644 --- a/celery/bin/celery.py +++ b/celery/bin/celery.py @@ -11,7 +11,6 @@ import click import click.exceptions -from click.types import ParamType from click_didyoumean import DYMGroup from click_plugins import with_plugins @@ -48,34 +47,6 @@ {0}""") -class App(ParamType): - """Application option.""" - - name = "application" - - def convert(self, value, param, ctx): - try: - return find_app(value) - except ModuleNotFoundError as e: - if e.name != 
value: - exc = traceback.format_exc() - self.fail( - UNABLE_TO_LOAD_APP_ERROR_OCCURRED.format(value, exc) - ) - self.fail(UNABLE_TO_LOAD_APP_MODULE_NOT_FOUND.format(e.name)) - except AttributeError as e: - attribute_name = e.args[0].capitalize() - self.fail(UNABLE_TO_LOAD_APP_APP_MISSING.format(attribute_name)) - except Exception: - exc = traceback.format_exc() - self.fail( - UNABLE_TO_LOAD_APP_ERROR_OCCURRED.format(value, exc) - ) - - -APP = App() - - if sys.version_info >= (3, 10): _PLUGINS = entry_points(group='celery.commands') else: @@ -91,7 +62,6 @@ def convert(self, value, param, ctx): '--app', envvar='APP', cls=CeleryOption, - type=APP, help_group="Global Options") @click.option('-b', '--broker', @@ -160,7 +130,26 @@ def celery(ctx, app, broker, result_backend, loader, config, workdir, os.environ['CELERY_CONFIG_MODULE'] = config if skip_checks: os.environ['CELERY_SKIP_CHECKS'] = 'true' - ctx.obj = CLIContext(app=app, no_color=no_color, workdir=workdir, + + try: + app_object = find_app(app) + except ModuleNotFoundError as e: + if e.name != app: + exc = traceback.format_exc() + ctx.fail( + UNABLE_TO_LOAD_APP_ERROR_OCCURRED.format(app, exc) + ) + ctx.fail(UNABLE_TO_LOAD_APP_MODULE_NOT_FOUND.format(e.name)) + except AttributeError as e: + attribute_name = e.args[0].capitalize() + ctx.fail(UNABLE_TO_LOAD_APP_APP_MISSING.format(attribute_name)) + except Exception: + exc = traceback.format_exc() + ctx.fail( + UNABLE_TO_LOAD_APP_ERROR_OCCURRED.format(app, exc) + ) + + ctx.obj = CLIContext(app=app_object, no_color=no_color, workdir=workdir, quiet=quiet) # User options From fd27267c629b7c4d2bae1c2f375f4fe7089c21f2 Mon Sep 17 00:00:00 2001 From: Haim Daniel <64732931+haimjether@users.noreply.github.com> Date: Wed, 16 Oct 2024 15:28:09 +0300 Subject: [PATCH 0878/1051] Add support for Google Pub/Sub transport (#9351) * Add support for Google Pub/Sub transport * Add events support. * Add mingle support. * Add documentation * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * Update docs/getting-started/backends-and-brokers/gcpubsub.rst --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Tomer Nosrati --- README.rst | 7 +- celery/events/event.py | 2 +- celery/worker/consumer/mingle.py | 2 +- .../backends-and-brokers/gcpubsub.rst | 144 ++++++++++++++++++ .../backends-and-brokers/index.rst | 12 ++ docs/includes/installation.txt | 3 + 6 files changed, 167 insertions(+), 3 deletions(-) create mode 100644 docs/getting-started/backends-and-brokers/gcpubsub.rst diff --git a/README.rst b/README.rst index 2b73e1c13bd..6a55a8b4953 100644 --- a/README.rst +++ b/README.rst @@ -171,7 +171,7 @@ It supports... - **Message Transports** - - RabbitMQ_, Redis_, Amazon SQS + - RabbitMQ_, Redis_, Amazon SQS, Google Pub/Sub - **Concurrency** @@ -183,6 +183,7 @@ It supports... - memcached - SQLAlchemy, Django ORM - Apache Cassandra, IronCache, Elasticsearch + - Google Cloud Storage - **Serialization** @@ -372,6 +373,10 @@ Transports and Backends You should probably not use this in your requirements, it's here for informational purposes only. +:``celery[gcpubsub]``: + for using Google Pub/Sub as a message transport. + + .. _celery-installing-from-source: diff --git a/celery/events/event.py b/celery/events/event.py index a05ed7071a5..fd2ee1ebe50 100644 --- a/celery/events/event.py +++ b/celery/events/event.py @@ -55,7 +55,7 @@ def get_exchange(conn, name=EVENT_EXCHANGE_NAME): (from topic -> fanout). 
""" ex = copy(event_exchange) - if conn.transport.driver_type == 'redis': + if conn.transport.driver_type in {'redis', 'gcpubsub'}: # quick hack for Issue #436 ex.type = 'fanout' if name != ex.name: diff --git a/celery/worker/consumer/mingle.py b/celery/worker/consumer/mingle.py index 532ab75ea8e..d3f626e702b 100644 --- a/celery/worker/consumer/mingle.py +++ b/celery/worker/consumer/mingle.py @@ -22,7 +22,7 @@ class Mingle(bootsteps.StartStopStep): label = 'Mingle' requires = (Events,) - compatible_transports = {'amqp', 'redis'} + compatible_transports = {'amqp', 'redis', 'gcpubsub'} def __init__(self, c, without_mingle=False, **kwargs): self.enabled = not without_mingle and self.compatible_transport(c.app) diff --git a/docs/getting-started/backends-and-brokers/gcpubsub.rst b/docs/getting-started/backends-and-brokers/gcpubsub.rst new file mode 100644 index 00000000000..9fe381ee509 --- /dev/null +++ b/docs/getting-started/backends-and-brokers/gcpubsub.rst @@ -0,0 +1,144 @@ +.. _broker-gcpubsub: + +===================== + Using Google Pub/Sub +===================== + +.. versionadded:: 5.5 + +.. _broker-gcpubsub-installation: + +Installation +============ + +For the Google Pub/Sub support you have to install additional dependencies. +You can install both Celery and these dependencies in one go using +the ``celery[gcpubsub]`` :ref:`bundle `: + +.. code-block:: console + + $ pip install "celery[gcpubsub]" + +.. _broker-gcpubsub-configuration: + +Configuration +============= + +You have to specify gcpubsub and google project in the broker URL:: + + broker_url = 'gcpubsub://projects/project-id' + +where the URL format is: + +.. code-block:: text + + gcpubsub://projects/project-id + +Please note that you must prefix the project-id with `projects/` in the URL. + +The login credentials will be your regular GCP credentials set in the environment. + +Options +======= + +Resource expiry +--------------- + +The default settings are built to be as simple cost effective and intuitive as possible and to "just work". +The pubsub messages and subscriptions are set to expire after 24 hours, and can be set +by configuring the :setting:`expiration_seconds` setting:: + + expiration_seconds = 86400 + +.. seealso:: + + An overview of Google Cloud Pub/Sub settings can be found here: + + https://cloud.google.com/pubsub/docs + +.. _gcpubsub-ack_deadline_seconds: + +Ack Deadline Seconds +-------------------- + +The `ack_deadline_seconds` defines the number of seconds pub/sub infra shall wait +for the worker to acknowledge the task before the message is redelivered +to another worker. + +This option is set via the :setting:`broker_transport_options` setting:: + + broker_transport_options = {'ack_deadline_seconds': 60} # 1 minute. + +The default visibility timeout is 240 seconds, and the worker takes care for +automatically extending all pending messages it has. + +.. seealso:: + + An overview of Pub/Sub deadline can be found here: + + https://cloud.google.com/pubsub/docs/lease-management + + + +Polling Interval +---------------- + +The polling interval decides the number of seconds to sleep between +unsuccessful polls. This value can be either an int or a float. +By default the value is *0.1 seconds*. However it doesn't mean +that the worker will bomb the Pub/Sub API every 0.1 seconds when there's no +more messages to read, since it will be blocked by a blocking call to +the Pub/Sub API, which will only return when there's a new message to read +or after 10 seconds. 
+
+The polling interval can be set via the :setting:`broker_transport_options`
+setting::
+
+    broker_transport_options = {'polling_interval': 0.3}
+
+Very frequent polling intervals can cause *busy loops*, resulting in the
+worker using a lot of CPU time. If you need sub-millisecond precision you
+should consider using another transport, like `RabbitMQ `,
+or `Redis `.
+
+Queue Prefix
+------------
+
+By default, Celery will assign a `kombu-` prefix to the queue names.
+If you have other services using Pub/Sub, you can configure a different prefix
+using the :setting:`broker_transport_options` setting::
+
+    broker_transport_options = {'queue_name_prefix': 'kombu-'}
+
+.. _gcpubsub-results-configuration:
+
+Results
+-------
+
+Google Cloud Storage (GCS) could be a good candidate to store the results.
+See :ref:`gcs` for more information.
+
+Caveats
+=======
+
+- When using Celery Flower, an ``--inspect-timeout=10`` option is required to
+  detect worker state correctly.
+
+- Idle Pub/Sub subscriptions (no queued messages) are configured to be
+  removed after 24 hours. This aims to reduce costs.
+
+- Queued and unacknowledged messages are set to be cleaned up automatically
+  after 24 hours, for the same reason.
+
+- The channel queue size is an approximation and may not be accurate,
+  since the Pub/Sub API does not provide a way to get the exact number
+  of messages in a subscription.
+
+- Orphan (no subscriptions) Pub/Sub topics aren't removed automatically!
+  Since GCP imposes a hard limit of 10k topics per project,
+  it is recommended to remove orphan topics manually in a periodic manner.
+
+- The maximum message size is limited to 10MB; as a workaround you can use
+  the GCS backend to store the message in GCS and pass the GCS URL to the task.
diff --git a/docs/getting-started/backends-and-brokers/index.rst b/docs/getting-started/backends-and-brokers/index.rst
index 0c5861fe0fb..ef4422246c3 100644
--- a/docs/getting-started/backends-and-brokers/index.rst
+++ b/docs/getting-started/backends-and-brokers/index.rst
@@ -21,6 +21,7 @@ Broker Instructions
     redis
     sqs
     kafka
+    gcpubsub

 .. _broker-overview:

@@ -44,6 +45,8 @@ individual transport (see :ref:`broker_toc`).
 +---------------+--------------+----------------+--------------------+
 | *Kafka*       | Experimental | No             | No                 |
 +---------------+--------------+----------------+--------------------+
+| *GC PubSub*   | Experimental | Yes            | Yes                |
++---------------+--------------+----------------+--------------------+

 Experimental brokers may be functional but they don't have dedicated maintainers.

@@ -104,3 +107,12 @@ SQLAlchemy is a backend.
 It allows Celery to interface with MySQL, PostgreSQL, SQlite, and more. It is an ORM, and is the way Celery can use a SQL DB as a result backend.

 :ref:`See documentation for details `
+
+GCPubSub
+--------
+
+Google Cloud Pub/Sub is a broker.
+
+If you already integrate tightly with Google Cloud, and are familiar with Pub/Sub, it presents a great option as a broker. It is extremely scalable and completely managed, and manages task delegation similarly to RabbitMQ.
+
+:ref:`See documentation for details `
diff --git a/docs/includes/installation.txt b/docs/includes/installation.txt
index 7422f16fc65..b96758b03cf 100644
--- a/docs/includes/installation.txt
+++ b/docs/includes/installation.txt
@@ -118,6 +118,9 @@ Transports and Backends
 :``celery[gcs]``:
     for using the Google Cloud Storage as a result backend (*experimental*).

+:``celery[gcpubsub]``:
+    for using Google Cloud Pub/Sub as a message transport (*experimental*).

 ..
_celery-installing-from-source: From 9f43916da91e71c56a9eb51ea657e54d128d2726 Mon Sep 17 00:00:00 2001 From: Haim Daniel <64732931+haimjether@users.noreply.github.com> Date: Thu, 17 Oct 2024 21:39:52 +0300 Subject: [PATCH 0879/1051] Add native incr support for GCSBackend (#9302) * Add native incr support for GCSBackend * Implement chord ref count on top of Google Firestore * Improve runtime and reduce amount of data read from GCS. * Skip test_gcs for Python 3.13 Skipping test until python-firestore package gets a support * Skip test_gcs for Python 3.13 Skipping test until python-firestore package gets a support * Pin grpcio version for pypy * Fix module level import --------- Co-authored-by: Asif Saif Uddin --- celery/backends/gcs.py | 227 +++++++++++++++++++- docs/userguide/configuration.rst | 14 +- requirements/extras/gcs.txt | 2 + t/unit/backends/test_gcs.py | 358 +++++++++++++++++++++++++++++-- 4 files changed, 574 insertions(+), 27 deletions(-) diff --git a/celery/backends/gcs.py b/celery/backends/gcs.py index c57c2e44960..d667a9ccced 100644 --- a/celery/backends/gcs.py +++ b/celery/backends/gcs.py @@ -8,35 +8,51 @@ from kombu.utils.functional import dictfilter from kombu.utils.url import url_to_parts -from celery.exceptions import ImproperlyConfigured +from celery.canvas import maybe_signature +from celery.exceptions import ChordError, ImproperlyConfigured +from celery.result import GroupResult, allow_join_result +from celery.utils.log import get_logger from .base import KeyValueStoreBackend try: import requests + from google.api_core import retry + from google.api_core.exceptions import Conflict + from google.api_core.retry import if_exception_type from google.cloud import storage from google.cloud.storage import Client from google.cloud.storage.retry import DEFAULT_RETRY except ImportError: storage = None +try: + from google.cloud import firestore, firestore_admin_v1 +except ImportError: + firestore = None + firestore_admin_v1 = None + + __all__ = ('GCSBackend',) -class GCSBackend(KeyValueStoreBackend): +logger = get_logger(__name__) + + +class GCSBackendBase(KeyValueStoreBackend): """Google Cloud Storage task result backend.""" def __init__(self, **kwargs): + if not storage: + raise ImproperlyConfigured( + 'You must install google-cloud-storage to use gcs backend' + ) super().__init__(**kwargs) - self._lock = RLock() + self._client_lock = RLock() self._pid = getpid() self._retry_policy = DEFAULT_RETRY self._client = None - if not storage: - raise ImproperlyConfigured( - 'You must install google-cloud-storage to use gcs backend' - ) conf = self.app.conf if self.url: url_params = self._params_from_url() @@ -96,7 +112,7 @@ def client(self): """Returns a storage client.""" # make sure it's thread-safe, as creating a new client is expensive - with self._lock: + with self._client_lock: if self._client and self._pid == getpid(): return self._client # make sure each process gets its own connection after a fork @@ -139,3 +155,198 @@ def _params_from_url(self): 'gcs_base_path': url_parts.path, **url_parts.query, } + + +class GCSBackend(GCSBackendBase): + """Google Cloud Storage task result backend. + + Uses Firestore for chord ref count. 
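+
+    A Firestore TTL policy must be enabled on the ``celery`` collection;
+    the constructor verifies this and raises ``ImproperlyConfigured``
+    when the policy is missing.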
+ """ + + implements_incr = True + supports_native_join = True + + # Firestore parameters + _collection_name = 'celery' + _field_count = 'chord_count' + _field_expires = 'expires_at' + + def __init__(self, **kwargs): + if not (firestore and firestore_admin_v1): + raise ImproperlyConfigured( + 'You must install google-cloud-firestore to use gcs backend' + ) + super().__init__(**kwargs) + + self._firestore_lock = RLock() + self._firestore_client = None + + self.firestore_project = self.app.conf.get( + 'firestore_project', self.project + ) + if not self._is_firestore_ttl_policy_enabled(): + raise ImproperlyConfigured( + f'Missing TTL policy to use gcs backend with ttl on ' + f'Firestore collection: {self._collection_name} ' + f'project: {self.firestore_project}' + ) + + @property + def firestore_client(self): + """Returns a firestore client.""" + + # make sure it's thread-safe, as creating a new client is expensive + with self._firestore_lock: + if self._firestore_client and self._pid == getpid(): + return self._firestore_client + # make sure each process gets its own connection after a fork + self._firestore_client = firestore.Client( + project=self.firestore_project + ) + self._pid = getpid() + return self._firestore_client + + def _is_firestore_ttl_policy_enabled(self): + client = firestore_admin_v1.FirestoreAdminClient() + + name = ( + f"projects/{self.firestore_project}" + f"/databases/(default)/collectionGroups/{self._collection_name}" + f"/fields/{self._field_expires}" + ) + request = firestore_admin_v1.GetFieldRequest(name=name) + field = client.get_field(request=request) + + ttl_config = field.ttl_config + return ttl_config and ttl_config.state in { + firestore_admin_v1.Field.TtlConfig.State.ACTIVE, + firestore_admin_v1.Field.TtlConfig.State.CREATING, + } + + def _apply_chord_incr(self, header_result_args, body, **kwargs): + key = self.get_key_for_chord(header_result_args[0]).decode() + self._expire_chord_key(key, 86400) + return super()._apply_chord_incr(header_result_args, body, **kwargs) + + def incr(self, key: bytes) -> int: + doc = self._firestore_document(key) + resp = doc.set( + {self._field_count: firestore.Increment(1)}, + merge=True, + retry=retry.Retry( + predicate=if_exception_type(Conflict), + initial=1.0, + maximum=180.0, + multiplier=2.0, + timeout=180.0, + ), + ) + return resp.transform_results[0].integer_value + + def on_chord_part_return(self, request, state, result, **kwargs): + """Chord part return callback. + + Called for each task in the chord. + Increments the counter stored in Firestore. + If the counter reaches the number of tasks in the chord, the callback + is called. + If the callback raises an exception, the chord is marked as errored. + If the callback returns a value, the chord is marked as successful. 
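+
+        The counter is kept in a Firestore document keyed by the chord's
+        group id: each part return performs an atomic increment, and the
+        callback fires once the incremented value reaches the chord size.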
+ """ + app = self.app + gid = request.group + if not gid: + return + key = self.get_key_for_chord(gid) + val = self.incr(key) + size = request.chord.get("chord_size") + if size is None: + deps = self._restore_deps(gid, request) + if deps is None: + return + size = len(deps) + if val > size: # pragma: no cover + logger.warning( + 'Chord counter incremented too many times for %r', gid + ) + elif val == size: + # Read the deps once, to reduce the number of reads from GCS ($$) + deps = self._restore_deps(gid, request) + if deps is None: + return + callback = maybe_signature(request.chord, app=app) + j = deps.join_native + try: + with allow_join_result(): + ret = j( + timeout=app.conf.result_chord_join_timeout, + propagate=True, + ) + except Exception as exc: # pylint: disable=broad-except + try: + culprit = next(deps._failed_join_report()) + reason = 'Dependency {0.id} raised {1!r}'.format( + culprit, + exc, + ) + except StopIteration: + reason = repr(exc) + + logger.exception('Chord %r raised: %r', gid, reason) + self.chord_error_from_stack(callback, ChordError(reason)) + else: + try: + callback.delay(ret) + except Exception as exc: # pylint: disable=broad-except + logger.exception('Chord %r raised: %r', gid, exc) + self.chord_error_from_stack( + callback, + ChordError(f'Callback error: {exc!r}'), + ) + finally: + deps.delete() + # Firestore doesn't have an exact ttl policy, so delete the key. + self._delete_chord_key(key) + + def _restore_deps(self, gid, request): + app = self.app + try: + deps = GroupResult.restore(gid, backend=self) + except Exception as exc: # pylint: disable=broad-except + callback = maybe_signature(request.chord, app=app) + logger.exception('Chord %r raised: %r', gid, exc) + self.chord_error_from_stack( + callback, + ChordError(f'Cannot restore group: {exc!r}'), + ) + return + if deps is None: + try: + raise ValueError(gid) + except ValueError as exc: + callback = maybe_signature(request.chord, app=app) + logger.exception('Chord callback %r raised: %r', gid, exc) + self.chord_error_from_stack( + callback, + ChordError(f'GroupResult {gid} no longer exists'), + ) + return deps + + def _delete_chord_key(self, key): + doc = self._firestore_document(key) + doc.delete() + + def _expire_chord_key(self, key, expires): + """Set TTL policy for a Firestore document. + + Firestore ttl data is typically deleted within 24 hours after its + expiration date. + """ + val_expires = datetime.utcnow() + timedelta(seconds=expires) + doc = self._firestore_document(key) + doc.set({self._field_expires: val_expires}, merge=True) + + def _firestore_document(self, key): + return self.firestore_client.collection( + self._collection_name + ).document(bytes_to_str(key)) diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst index ab17540ae6b..391dc35c8b9 100644 --- a/docs/userguide/configuration.rst +++ b/docs/userguide/configuration.rst @@ -1829,7 +1829,7 @@ GCS backend settings .. note:: - This gcs backend driver requires :pypi:`google-cloud-storage`. + This gcs backend driver requires :pypi:`google-cloud-storage` and :pypi:`google-cloud-firestore`. 
To install, use :command:`gcs`: @@ -1843,6 +1843,7 @@ GCS backend settings GCS could be configured via the URL provided in :setting:`result_backend`, for example:: result_backend = 'gs://mybucket/some-prefix?gcs_project=myproject&ttl=600' + result_backend = 'gs://mybucket/some-prefix?gcs_project=myproject?firestore_project=myproject2&ttl=600' This backend requires the following configuration directives to be set: @@ -1902,6 +1903,17 @@ Allows to control the number of concurrent operations. For example:: gcs_threadpool_maxsize = 20 +``firestore_project`` +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Default: gcs_project. + +The Firestore project for Chord reference counting. Allows native chord ref counts. +If not specified defaults to :setting:`gcs_project`. +For example:: + + firestore_project = 'test-project2' + Example configuration ~~~~~~~~~~~~~~~~~~~~~ diff --git a/requirements/extras/gcs.txt b/requirements/extras/gcs.txt index 7f34beca1b6..5abe8bea085 100644 --- a/requirements/extras/gcs.txt +++ b/requirements/extras/gcs.txt @@ -1 +1,3 @@ google-cloud-storage>=2.10.0 +google-cloud-firestore==2.18.0 +grpcio==1.66.2 diff --git a/t/unit/backends/test_gcs.py b/t/unit/backends/test_gcs.py index 8ebfbc9aa58..32e10659136 100644 --- a/t/unit/backends/test_gcs.py +++ b/t/unit/backends/test_gcs.py @@ -1,13 +1,24 @@ -from datetime import datetime -from unittest.mock import Mock, call, patch +import sys +from datetime import datetime, timedelta +from unittest.mock import MagicMock, Mock, call, patch import pytest from google.cloud.exceptions import NotFound -from celery.backends.gcs import GCSBackend from celery.exceptions import ImproperlyConfigured +# Workaround until python-firestore is fixed +is_py313 = sys.version_info >= (3, 13) +if not is_py313: + from celery.backends.gcs import GCSBackend +else: + GCSBackend = None + +@pytest.mark.skipif( + is_py313, + reason="https://github.com/googleapis/python-firestore/issues/973", +) class test_GCSBackend: def setup_method(self): self.app.conf.gcs_bucket = 'bucket' @@ -18,12 +29,21 @@ def base_path(self, request): return request.param @pytest.fixture(params=[86400, None]) - def ttl(self, request): + def gcs_ttl(self, request): return request.param def test_missing_storage_module(self): with patch('celery.backends.gcs.storage', None): - with pytest.raises(ImproperlyConfigured, match='You must install'): + with pytest.raises( + ImproperlyConfigured, match='You must install' + ): + GCSBackend(app=self.app) + + def test_missing_firestore_module(self): + with patch('celery.backends.gcs.firestore', None): + with pytest.raises( + ImproperlyConfigured, match='You must install' + ): GCSBackend(app=self.app) def test_missing_bucket(self): @@ -38,6 +58,15 @@ def test_missing_project(self): with pytest.raises(ImproperlyConfigured, match='Missing project'): GCSBackend(app=self.app) + @patch.object(GCSBackend, '_is_firestore_ttl_policy_enabled') + def test_firestore_project(self, mock_firestore_ttl): + mock_firestore_ttl.return_value = True + b = GCSBackend(app=self.app) + assert b.firestore_project == 'project' + self.app.conf.firestore_project = 'project2' + b = GCSBackend(app=self.app) + assert b.firestore_project == 'project2' + def test_invalid_ttl(self): self.app.conf.gcs_bucket = 'bucket' self.app.conf.gcs_project = 'project' @@ -46,21 +75,38 @@ def test_invalid_ttl(self): with pytest.raises(ImproperlyConfigured, match='Invalid ttl'): GCSBackend(app=self.app) - def test_parse_url(self, base_path): + @patch.object(GCSBackend, '_is_firestore_ttl_policy_enabled') + def 
test_firestore_ttl_policy_disabled(self, mock_firestore_ttl): + self.app.conf.gcs_bucket = 'bucket' + self.app.conf.gcs_project = 'project' + self.app.conf.gcs_ttl = 0 + + mock_firestore_ttl.return_value = False + with pytest.raises(ImproperlyConfigured, match='Missing TTL policy'): + GCSBackend(app=self.app) + + @patch.object(GCSBackend, '_is_firestore_ttl_policy_enabled') + def test_parse_url(self, mock_firestore_ttl, base_path): self.app.conf.gcs_bucket = None self.app.conf.gcs_project = None + mock_firestore_ttl.return_value = True backend = GCSBackend( - app=self.app, url=f'gcs://bucket/{base_path}?gcs_project=project' + app=self.app, + url=f'gcs://bucket/{base_path}?gcs_project=project', ) assert backend.bucket_name == 'bucket' assert backend.base_path == base_path.strip('/') @patch.object(GCSBackend, '_is_bucket_lifecycle_rule_exists') - def test_ttl_missing_lifecycle_rule(self, mock_lifecycle): + @patch.object(GCSBackend, '_is_firestore_ttl_policy_enabled') + def test_bucket_ttl_missing_lifecycle_rule( + self, mock_firestore_ttl, mock_lifecycle + ): self.app.conf.gcs_ttl = 86400 mock_lifecycle.return_value = False + mock_firestore_ttl.return_value = True with pytest.raises( ImproperlyConfigured, match='Missing lifecycle rule' ): @@ -68,9 +114,11 @@ def test_ttl_missing_lifecycle_rule(self, mock_lifecycle): mock_lifecycle.assert_called_once() @patch.object(GCSBackend, '_get_blob') - def test_get_key(self, mock_get_blob, base_path): + @patch.object(GCSBackend, '_is_firestore_ttl_policy_enabled') + def test_get_key(self, mock_ttl, mock_get_blob, base_path): self.app.conf.gcs_base_path = base_path + mock_ttl.return_value = True mock_blob = Mock() mock_get_blob.return_value = mock_blob backend = GCSBackend(app=self.app) @@ -81,10 +129,19 @@ def test_get_key(self, mock_get_blob, base_path): @patch.object(GCSBackend, 'bucket') @patch.object(GCSBackend, '_get_blob') - def test_set_key(self, mock_get_blob, mock_bucket_prop, base_path, ttl): + @patch.object(GCSBackend, '_is_firestore_ttl_policy_enabled') + def test_set_key( + self, + mock_firestore_ttl, + mock_get_blob, + mock_bucket_prop, + base_path, + gcs_ttl, + ): self.app.conf.gcs_base_path = base_path - self.app.conf.gcs_ttl = ttl + self.app.conf.gcs_ttl = gcs_ttl + mock_firestore_ttl.return_value = True mock_blob = Mock() mock_get_blob.return_value = mock_blob mock_bucket_prop.lifecycle_rules = [{'action': {'type': 'Delete'}}] @@ -94,14 +151,16 @@ def test_set_key(self, mock_get_blob, mock_bucket_prop, base_path, ttl): mock_blob.upload_from_string.assert_called_once_with( 'testvalue', retry=backend._retry_policy ) - if ttl: + if gcs_ttl: assert mock_blob.custom_time >= datetime.utcnow() @patch.object(GCSBackend, '_get_blob') - def test_get_missing_key(self, mock_get_blob): + @patch.object(GCSBackend, '_is_firestore_ttl_policy_enabled') + def test_get_missing_key(self, mock_firestore_ttl, mock_get_blob): self.app.conf.gcs_bucket = 'bucket' self.app.conf.gcs_project = 'project' + mock_firestore_ttl.return_value = True mock_blob = Mock() mock_get_blob.return_value = mock_blob @@ -112,9 +171,13 @@ def test_get_missing_key(self, mock_get_blob): assert result is None @patch.object(GCSBackend, '_get_blob') - def test_delete_existing_key(self, mock_get_blob, base_path): + @patch.object(GCSBackend, '_is_firestore_ttl_policy_enabled') + def test_delete_existing_key( + self, mock_firestore_ttl, mock_get_blob, base_path + ): self.app.conf.gcs_base_path = base_path + mock_firestore_ttl.return_value = True mock_blob = Mock() 
mock_get_blob.return_value = mock_blob mock_blob.exists.return_value = True @@ -126,9 +189,13 @@ def test_delete_existing_key(self, mock_get_blob, base_path): mock_blob.delete.assert_called_once() @patch.object(GCSBackend, '_get_blob') - def test_delete_missing_key(self, mock_get_blob, base_path): + @patch.object(GCSBackend, '_is_firestore_ttl_policy_enabled') + def test_delete_missing_key( + self, mock_firestore_ttl, mock_get_blob, base_path + ): self.app.conf.gcs_base_path = base_path + mock_firestore_ttl.return_value = True mock_blob = Mock() mock_get_blob.return_value = mock_blob mock_blob.exists.return_value = False @@ -140,23 +207,278 @@ def test_delete_missing_key(self, mock_get_blob, base_path): mock_blob.delete.assert_not_called() @patch.object(GCSBackend, 'get') - def test_mget(self, mock_get, base_path): + @patch.object(GCSBackend, '_is_firestore_ttl_policy_enabled') + def test_mget(self, mock_firestore_ttl, mock_get, base_path): self.app.conf.gcs_base_path = base_path + mock_firestore_ttl.return_value = True backend = GCSBackend(app=self.app) mock_get.side_effect = ['value1', 'value2'] result = backend.mget([b'key1', b'key2']) - mock_get.assert_has_calls([call(b'key1'), call(b'key2')], any_order=True) + mock_get.assert_has_calls( + [call(b'key1'), call(b'key2')], any_order=True + ) assert sorted(result) == sorted(['value1', 'value2']) + @patch.object(GCSBackend, 'client') + @patch.object(GCSBackend, '_is_firestore_ttl_policy_enabled') + def test_bucket(self, mock_firestore_ttl, mock_client): + mock_bucket = MagicMock() + mock_client.bucket.return_value = mock_bucket + mock_firestore_ttl.return_value = True + backend = GCSBackend(app=self.app) + result = backend.bucket + mock_client.bucket.assert_called_once_with(backend.bucket_name) + assert result == mock_bucket + + @patch.object(GCSBackend, 'bucket') + @patch.object(GCSBackend, '_is_firestore_ttl_policy_enabled') + def test_get_blob(self, mock_firestore_ttl, mock_bucket): + key = 'test_key' + mock_blob = MagicMock() + mock_bucket.blob.return_value = mock_blob + mock_firestore_ttl.return_value = True + + backend = GCSBackend(app=self.app) + result = backend._get_blob(key) + + key_bucket_path = ( + f'{backend.base_path}/{key}' if backend.base_path else key + ) + mock_bucket.blob.assert_called_once_with(key_bucket_path) + assert result == mock_blob + @patch('celery.backends.gcs.Client') @patch('celery.backends.gcs.getpid') - def test_new_client_after_fork(self, mock_pid, mock_client): + @patch.object(GCSBackend, '_is_firestore_ttl_policy_enabled') + def test_new_client_after_fork( + self, mock_firestore_ttl, mock_pid, mock_client + ): + mock_firestore_ttl.return_value = True mock_pid.return_value = 123 backend = GCSBackend(app=self.app) client1 = backend.client + assert client1 == backend.client mock_pid.assert_called() mock_client.assert_called() mock_pid.return_value = 456 mock_client.return_value = Mock() assert client1 != backend.client mock_client.assert_called_with(project='project') + + @patch.object(GCSBackend, '_is_firestore_ttl_policy_enabled') + @patch('celery.backends.gcs.firestore.Client') + @patch('celery.backends.gcs.getpid') + def test_new_firestore_client_after_fork( + self, mock_pid, mock_firestore_client, mock_firestore_ttl + ): + mock_firestore_instance = MagicMock() + mock_firestore_client.return_value = mock_firestore_instance + + backend = GCSBackend(app=self.app) + mock_pid.return_value = 123 + client1 = backend.firestore_client + client2 = backend.firestore_client + + 
mock_firestore_client.assert_called_once_with( + project=backend.firestore_project + ) + assert client1 == mock_firestore_instance + assert client2 == mock_firestore_instance + assert backend._pid == 123 + mock_pid.return_value = 456 + _ = backend.firestore_client + assert backend._pid == 456 + + @patch('celery.backends.gcs.firestore_admin_v1.FirestoreAdminClient') + @patch('celery.backends.gcs.firestore_admin_v1.GetFieldRequest') + def test_is_firestore_ttl_policy_enabled( + self, mock_get_field_request, mock_firestore_admin_client + ): + mock_client_instance = MagicMock() + mock_firestore_admin_client.return_value = mock_client_instance + mock_field = MagicMock() + mock_field.ttl_config.state = 2 # State.ENABLED + mock_client_instance.get_field.return_value = mock_field + + backend = GCSBackend(app=self.app) + result = backend._is_firestore_ttl_policy_enabled() + + assert result + mock_field.ttl_config.state = 3 # State.NEEDS_REPAIR + mock_client_instance.get_field.return_value = mock_field + result = backend._is_firestore_ttl_policy_enabled() + assert not result + + @patch.object(GCSBackend, '_is_firestore_ttl_policy_enabled') + @patch.object(GCSBackend, '_expire_chord_key') + @patch.object(GCSBackend, 'get_key_for_chord') + @patch('celery.backends.gcs.KeyValueStoreBackend._apply_chord_incr') + def test_apply_chord_incr( + self, + mock_super_apply_chord_incr, + mock_get_key_for_chord, + mock_expire_chord_key, + mock_firestore_ttl, + ): + mock_firestore_ttl.return_value = True + mock_get_key_for_chord.return_value = b'group_key' + header_result_args = [MagicMock()] + body = MagicMock() + + backend = GCSBackend(app=self.app) + backend._apply_chord_incr(header_result_args, body) + + mock_get_key_for_chord.assert_called_once_with(header_result_args[0]) + mock_expire_chord_key.assert_called_once_with('group_key', 86400) + mock_super_apply_chord_incr.assert_called_once_with( + header_result_args, body + ) + + @patch.object(GCSBackend, '_firestore_document') + @patch.object(GCSBackend, '_is_firestore_ttl_policy_enabled') + def test_incr(self, mock_firestore_ttl, mock_firestore_document): + self.app.conf.gcs_bucket = 'bucket' + self.app.conf.gcs_project = 'project' + + mock_firestore_ttl.return_value = True + gcs_backend = GCSBackend(app=self.app) + gcs_backend.incr(b'some-key') + assert mock_firestore_document.call_count == 1 + + @patch('celery.backends.gcs.maybe_signature') + @patch.object(GCSBackend, 'incr') + @patch.object(GCSBackend, '_restore_deps') + @patch.object(GCSBackend, '_delete_chord_key') + @patch('celery.backends.gcs.allow_join_result') + @patch.object(GCSBackend, '_is_firestore_ttl_policy_enabled') + def test_on_chord_part_return( + self, + mock_firestore_ttl, + mock_allow_join_result, + mock_delete_chord_key, + mock_restore_deps, + mock_incr, + mock_maybe_signature, + ): + request = MagicMock() + request.group = 'group_id' + request.chord = {'chord_size': 2} + state = MagicMock() + result = MagicMock() + mock_firestore_ttl.return_value = True + mock_incr.return_value = 2 + mock_restore_deps.return_value = MagicMock() + mock_restore_deps.return_value.join_native.return_value = [ + 'result1', + 'result2', + ] + mock_maybe_signature.return_value = MagicMock() + + b = GCSBackend(app=self.app) + b.on_chord_part_return(request, state, result) + + group_key = b.chord_keyprefix + b'group_id' + mock_incr.assert_called_once_with(group_key) + mock_restore_deps.assert_called_once_with('group_id', request) + mock_maybe_signature.assert_called_once_with( + request.chord, app=self.app + ) + 
mock_restore_deps.return_value.join_native.assert_called_once_with( + timeout=self.app.conf.result_chord_join_timeout, + propagate=True, + ) + mock_maybe_signature.return_value.delay.assert_called_once_with( + ['result1', 'result2'] + ) + mock_delete_chord_key.assert_called_once_with(group_key) + + @patch.object(GCSBackend, '_is_firestore_ttl_policy_enabled') + @patch('celery.backends.gcs.GroupResult.restore') + @patch('celery.backends.gcs.maybe_signature') + @patch.object(GCSBackend, 'chord_error_from_stack') + def test_restore_deps( + self, + mock_chord_error_from_stack, + mock_maybe_signature, + mock_group_result_restore, + mock_firestore_ttl, + ): + gid = 'group_id' + request = MagicMock() + mock_group_result_restore.return_value = MagicMock() + + backend = GCSBackend(app=self.app) + deps = backend._restore_deps(gid, request) + + mock_group_result_restore.assert_called_once_with( + gid, backend=backend + ) + assert deps is not None + mock_chord_error_from_stack.assert_not_called() + + mock_group_result_restore.side_effect = Exception('restore error') + deps = backend._restore_deps(gid, request) + mock_maybe_signature.assert_called_with(request.chord, app=self.app) + mock_chord_error_from_stack.assert_called_once() + assert deps is None + + mock_group_result_restore.side_effect = None + mock_group_result_restore.return_value = None + deps = backend._restore_deps(gid, request) + mock_chord_error_from_stack.assert_called() + assert deps is None + + @patch.object(GCSBackend, '_is_firestore_ttl_policy_enabled') + @patch.object(GCSBackend, '_firestore_document') + def test_delete_chord_key( + self, mock_firestore_document, mock_firestore_ttl + ): + key = 'test_key' + mock_document = MagicMock() + mock_firestore_document.return_value = mock_document + + backend = GCSBackend(app=self.app) + backend._delete_chord_key(key) + + mock_firestore_document.assert_called_once_with(key) + mock_document.delete.assert_called_once() + + @patch.object(GCSBackend, '_is_firestore_ttl_policy_enabled') + @patch.object(GCSBackend, '_firestore_document') + def test_expire_chord_key( + self, mock_firestore_document, mock_firestore_ttl + ): + key = 'test_key' + expires = 86400 + mock_document = MagicMock() + mock_firestore_document.return_value = mock_document + expected_expiry = datetime.utcnow() + timedelta(seconds=expires) + + backend = GCSBackend(app=self.app) + backend._expire_chord_key(key, expires) + + mock_firestore_document.assert_called_once_with(key) + mock_document.set.assert_called_once() + args, kwargs = mock_document.set.call_args + assert backend._field_expires in args[0] + assert args[0][backend._field_expires] >= expected_expiry + + @patch.object(GCSBackend, '_is_firestore_ttl_policy_enabled') + @patch.object(GCSBackend, 'firestore_client') + def test_firestore_document( + self, mock_firestore_client, mock_firestore_ttl + ): + key = b'test_key' + mock_collection = MagicMock() + mock_document = MagicMock() + mock_firestore_client.collection.return_value = mock_collection + mock_collection.document.return_value = mock_document + + backend = GCSBackend(app=self.app) + result = backend._firestore_document(key) + + mock_firestore_client.collection.assert_called_once_with( + backend._collection_name + ) + mock_collection.document.assert_called_once_with('test_key') + assert result == mock_document From 1fc305837a785dda8ed8cb196aec654427fe433e Mon Sep 17 00:00:00 2001 From: Max Nikitenko Date: Thu, 17 Oct 2024 21:43:13 +0300 Subject: [PATCH 0880/1051] =?UTF-8?q?fix(perform=5Fpending=5Foperations):?= 
=?UTF-8?q?=20prevent=20task=20duplication=20on=20shutdown=E2=80=A6=20(#93?=
 =?UTF-8?q?48)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* fix(perform_pending_operations): prevent task duplication on shutdown in gevent/eventlet modes

Resolved an issue where completed tasks were restored on shutdown,
causing duplicates for recursive tasks in gevent/eventlet modes (#5663).
The problem was that `perform_pending_operations()` was not called
before `SystemExit` was raised in `synloop`.

Closes #5663

* fix(consumer): force execution of pending operations before worker shutdown

With a concurrency of `1` in `gevent/eventlet` worker pools, where the
consumer has a `loop`, the worker process may receive the shutdown signal
while a task has just been picked up or is currently in progress and
finishes successfully. When the late-ack option is set, the acknowledgment
is then passed to `_pending_operations`. However, these operations would
never be executed, because the event loop had already finished all previous
operations and exited with `SystemExit`. Therefore, we need to forcefully
execute any pending operations before the shutdown, if any exist.

Closes #5663

---------

Co-authored-by: Max Nikitenko
---
 celery/worker/consumer/consumer.py |  1 +
 celery/worker/loops.py             | 12 +++++-
 t/unit/test_loops.py               | 57 +++++++++++++++++++++++++++++
 t/unit/worker/test_consumer.py     | 59 ++++++++++++++++++++++++++++++
 4 files changed, 127 insertions(+), 2 deletions(-)
 create mode 100644 t/unit/test_loops.py

diff --git a/celery/worker/consumer/consumer.py b/celery/worker/consumer/consumer.py
index 8241a976021..d1b38232c6e 100644
--- a/celery/worker/consumer/consumer.py
+++ b/celery/worker/consumer/consumer.py
@@ -412,6 +412,7 @@ def register_with_event_loop(self, hub):
         )

     def shutdown(self):
+        self.perform_pending_operations()
         self.blueprint.shutdown(self)

     def stop(self):
diff --git a/celery/worker/loops.py b/celery/worker/loops.py
index 0630e679fdd..1f9e589eeef 100644
--- a/celery/worker/loops.py
+++ b/celery/worker/loops.py
@@ -119,8 +119,10 @@ def synloop(obj, connection, consumer, blueprint, hub, qos,

     obj.on_ready()

-    while blueprint.state == RUN and obj.connection:
-        state.maybe_shutdown()
+    def _loop_cycle():
+        """
+        Perform one iteration of the blocking event loop.
+ """ if heartbeat_error[0] is not None: raise heartbeat_error[0] if qos.prev != qos.value: @@ -133,3 +135,9 @@ def synloop(obj, connection, consumer, blueprint, hub, qos, except OSError: if blueprint.state == RUN: raise + + while blueprint.state == RUN and obj.connection: + try: + state.maybe_shutdown() + finally: + _loop_cycle() diff --git a/t/unit/test_loops.py b/t/unit/test_loops.py new file mode 100644 index 00000000000..a2039941999 --- /dev/null +++ b/t/unit/test_loops.py @@ -0,0 +1,57 @@ +from unittest.mock import Mock, patch + +import pytest + +from celery import bootsteps +from celery.worker.loops import synloop + + +def test_synloop_perform_pending_operations_on_system_exit(): + # Mock dependencies + obj = Mock() + connection = Mock() + consumer = Mock() + blueprint = Mock() + hub = Mock() + qos = Mock() + heartbeat = Mock() + clock = Mock() + + # Set up the necessary attributes + obj.create_task_handler.return_value = Mock() + obj.perform_pending_operations = Mock() + obj.on_ready = Mock() + obj.pool.is_green = False + obj.connection = True + + blueprint.state = bootsteps.RUN # Simulate RUN state + + qos.prev = qos.value = Mock() + + # Mock state.maybe_shutdown to raise SystemExit + with patch("celery.worker.loops.state") as mock_state: + mock_state.maybe_shutdown.side_effect = SystemExit + + # Call synloop and expect SystemExit to be raised + with pytest.raises(SystemExit): + synloop( + obj, + connection, + consumer, + blueprint, + hub, + qos, + heartbeat, + clock, + hbrate=2.0, + ) + + # Assert that perform_pending_operations was called even after SystemExit + obj.perform_pending_operations.assert_called_once() + + # Assert that connection.drain_events was called + connection.drain_events.assert_called_with(timeout=2.0) + + # Assert other important method calls + obj.on_ready.assert_called_once() + consumer.consume.assert_called_once() diff --git a/t/unit/worker/test_consumer.py b/t/unit/worker/test_consumer.py index ae677a7bfad..b43471134b2 100644 --- a/t/unit/worker/test_consumer.py +++ b/t/unit/worker/test_consumer.py @@ -47,6 +47,7 @@ def setup_method(self): @self.app.task(shared=False) def add(x, y): return x + y + self.add = add def test_repr(self): @@ -147,6 +148,7 @@ def __enter__(self): def __exit__(self, *args): pass + c.qos._mutex = MutexMock() assert c._restore_prefetch_count_after_connection_restart(None) is None @@ -266,6 +268,7 @@ def test_max_restarts_exceeded(self): def se(*args, **kwargs): c.blueprint.state = CLOSE raise RestartFreqExceeded() + c._restart_state.step.side_effect = se c.blueprint.start.side_effect = socket.error() @@ -313,6 +316,7 @@ def test_too_many_open_files_raises_error(self): def _closer(self, c): def se(*args, **kwargs): c.blueprint.state = CLOSE + return se @pytest.mark.parametrize("broker_connection_retry", [True, False]) @@ -531,6 +535,61 @@ def test_start_raises_connection_error(self, assert expected_connection_retry_type in record.msg +class test_Consumer_PerformPendingOperations(ConsumerTestCase): + + def test_perform_pending_operations_all_success(self): + """ + Test that all pending operations are processed successfully when `once=False`. 
+ """ + c = self.get_consumer(no_hub=True) + + # Create mock operations + mock_operation_1 = Mock() + mock_operation_2 = Mock() + + # Add mock operations to _pending_operations + c._pending_operations = [mock_operation_1, mock_operation_2] + + # Call perform_pending_operations + c.perform_pending_operations() + + # Assert that all operations were called + mock_operation_1.assert_called_once() + mock_operation_2.assert_called_once() + + # Ensure all pending operations are cleared + assert len(c._pending_operations) == 0 + + def test_perform_pending_operations_with_exception(self): + """ + Test that pending operations are processed even if one raises an exception, and + the exception is logged when `once=False`. + """ + c = self.get_consumer(no_hub=True) + + # Mock operations: one failing, one successful + mock_operation_fail = Mock(side_effect=Exception("Test Exception")) + mock_operation_success = Mock() + + # Add operations to _pending_operations + c._pending_operations = [mock_operation_fail, mock_operation_success] + + # Patch logger to avoid logging during the test + with patch('celery.worker.consumer.consumer.logger.exception') as mock_logger: + # Call perform_pending_operations + c.perform_pending_operations() + + # Assert that both operations were attempted + mock_operation_fail.assert_called_once() + mock_operation_success.assert_called_once() + + # Ensure the exception was logged + mock_logger.assert_called_once() + + # Ensure all pending operations are cleared + assert len(c._pending_operations) == 0 + + class test_Heart: def test_start(self): From b72055eb7161c71617ceab05e8854643eb1e5bc8 Mon Sep 17 00:00:00 2001 From: "pyup.io bot" Date: Thu, 17 Oct 2024 14:37:39 -0700 Subject: [PATCH 0881/1051] Update grpcio from 1.66.2 to 1.67.0 (#9365) --- requirements/extras/gcs.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/gcs.txt b/requirements/extras/gcs.txt index 5abe8bea085..6db37e0f993 100644 --- a/requirements/extras/gcs.txt +++ b/requirements/extras/gcs.txt @@ -1,3 +1,3 @@ google-cloud-storage>=2.10.0 google-cloud-firestore==2.18.0 -grpcio==1.66.2 +grpcio==1.67.0 From 2f4892aaea61eb393069802090810a2f9be43170 Mon Sep 17 00:00:00 2001 From: "pyup.io bot" Date: Thu, 17 Oct 2024 15:14:27 -0700 Subject: [PATCH 0882/1051] Update google-cloud-firestore to 2.19.0 (#9364) * Update google-cloud-firestore from 2.18.0 to 2.19.0 * Update requirements/extras/gcs.txt * Update requirements/extras/gcs.txt --------- Co-authored-by: Tomer Nosrati --- requirements/extras/gcs.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/gcs.txt b/requirements/extras/gcs.txt index 6db37e0f993..363a19b8c8b 100644 --- a/requirements/extras/gcs.txt +++ b/requirements/extras/gcs.txt @@ -1,3 +1,3 @@ google-cloud-storage>=2.10.0 -google-cloud-firestore==2.18.0 +google-cloud-firestore==2.19.0 grpcio==1.67.0 From 8b57928bc15d75170657a20450fe8685c62207d2 Mon Sep 17 00:00:00 2001 From: hmn falahi <46359682+hmnfalahi@users.noreply.github.com> Date: Fri, 18 Oct 2024 20:06:53 +0330 Subject: [PATCH 0883/1051] Annotate celery/utils/timer2.py (#9362) * Annotate celery/utils/timer2.py * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- celery/utils/timer2.py | 54 +++++++++++++++++++++++------------------- 1 file changed, 30 insertions(+), 24 deletions(-) diff --git a/celery/utils/timer2.py 
b/celery/utils/timer2.py index 88d8ffd77ad..adfdb403a3a 100644 --- a/celery/utils/timer2.py +++ b/celery/utils/timer2.py @@ -10,6 +10,7 @@ from itertools import count from threading import TIMEOUT_MAX as THREAD_TIMEOUT_MAX from time import sleep +from typing import Any, Callable, Iterator, Optional, Tuple from kombu.asynchronous.timer import Entry from kombu.asynchronous.timer import Timer as Schedule @@ -30,20 +31,23 @@ class Timer(threading.Thread): Entry = Entry Schedule = Schedule - running = False - on_tick = None + running: bool = False + on_tick: Optional[Callable[[float], None]] = None - _timer_count = count(1) + _timer_count: count = count(1) if TIMER_DEBUG: # pragma: no cover - def start(self, *args, **kwargs): + def start(self, *args: Any, **kwargs: Any) -> None: import traceback print('- Timer starting') traceback.print_stack() super().start(*args, **kwargs) - def __init__(self, schedule=None, on_error=None, on_tick=None, - on_start=None, max_interval=None, **kwargs): + def __init__(self, schedule: Optional[Schedule] = None, + on_error: Optional[Callable[[Exception], None]] = None, + on_tick: Optional[Callable[[float], None]] = None, + on_start: Optional[Callable[['Timer'], None]] = None, + max_interval: Optional[float] = None, **kwargs: Any) -> None: self.schedule = schedule or self.Schedule(on_error=on_error, max_interval=max_interval) self.on_start = on_start @@ -60,8 +64,10 @@ def __init__(self, schedule=None, on_error=None, on_tick=None, self.daemon = True self.name = f'Timer-{next(self._timer_count)}' - def _next_entry(self): + def _next_entry(self) -> Optional[float]: with self.not_empty: + delay: Optional[float] + entry: Optional[Entry] delay, entry = next(self.scheduler) if entry is None: if delay is None: @@ -70,10 +76,10 @@ def _next_entry(self): return self.schedule.apply_entry(entry) __next__ = next = _next_entry # for 2to3 - def run(self): + def run(self) -> None: try: self.running = True - self.scheduler = iter(self.schedule) + self.scheduler: Iterator[Tuple[Optional[float], Optional[Entry]]] = iter(self.schedule) while not self.__is_shutdown.is_set(): delay = self._next_entry() @@ -94,61 +100,61 @@ def run(self): sys.stderr.flush() os._exit(1) - def stop(self): + def stop(self) -> None: self.__is_shutdown.set() if self.running: self.__is_stopped.wait() self.join(THREAD_TIMEOUT_MAX) self.running = False - def ensure_started(self): + def ensure_started(self) -> None: if not self.running and not self.is_alive(): if self.on_start: self.on_start(self) self.start() - def _do_enter(self, meth, *args, **kwargs): + def _do_enter(self, meth: str, *args: Any, **kwargs: Any) -> Entry: self.ensure_started() with self.mutex: entry = getattr(self.schedule, meth)(*args, **kwargs) self.not_empty.notify() return entry - def enter(self, entry, eta, priority=None): + def enter(self, entry: Entry, eta: float, priority: Optional[int] = None) -> Entry: return self._do_enter('enter_at', entry, eta, priority=priority) - def call_at(self, *args, **kwargs): + def call_at(self, *args: Any, **kwargs: Any) -> Entry: return self._do_enter('call_at', *args, **kwargs) - def enter_after(self, *args, **kwargs): + def enter_after(self, *args: Any, **kwargs: Any) -> Entry: return self._do_enter('enter_after', *args, **kwargs) - def call_after(self, *args, **kwargs): + def call_after(self, *args: Any, **kwargs: Any) -> Entry: return self._do_enter('call_after', *args, **kwargs) - def call_repeatedly(self, *args, **kwargs): + def call_repeatedly(self, *args: Any, **kwargs: Any) -> Entry: return 
self._do_enter('call_repeatedly', *args, **kwargs) - def exit_after(self, secs, priority=10): + def exit_after(self, secs: float, priority: int = 10) -> None: self.call_after(secs, sys.exit, priority) - def cancel(self, tref): + def cancel(self, tref: Entry) -> None: tref.cancel() - def clear(self): + def clear(self) -> None: self.schedule.clear() - def empty(self): + def empty(self) -> bool: return not len(self) - def __len__(self): + def __len__(self) -> int: return len(self.schedule) - def __bool__(self): + def __bool__(self) -> bool: """``bool(timer)``.""" return True __nonzero__ = __bool__ @property - def queue(self): + def queue(self) -> list: return self.schedule.queue From 75863fab829706b116b2f42f948e346121950a7c Mon Sep 17 00:00:00 2001 From: "pyup.io bot" Date: Fri, 18 Oct 2024 14:27:48 -0700 Subject: [PATCH 0884/1051] Update cryptography from 43.0.1 to 43.0.3 (#9366) --- requirements/extras/auth.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/auth.txt b/requirements/extras/auth.txt index ccb822680ef..75287dd9fb0 100644 --- a/requirements/extras/auth.txt +++ b/requirements/extras/auth.txt @@ -1 +1 @@ -cryptography==43.0.1 +cryptography==43.0.3 From 61e51731ac1212a9fa5f9de846d7ca8321d8d70f Mon Sep 17 00:00:00 2001 From: "pyup.io bot" Date: Sun, 20 Oct 2024 04:31:07 -0700 Subject: [PATCH 0885/1051] Update mypy from 1.12.0 to 1.12.1 (#9368) --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index a4031d1168b..e100132eb36 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -8,7 +8,7 @@ pytest-order==1.3.0 boto3>=1.26.143 moto>=4.1.11,<5.1.0 # typing extensions -mypy==1.12.0; platform_python_implementation=="CPython" +mypy==1.12.1; platform_python_implementation=="CPython" pre-commit>=3.5.0,<3.8.0; python_version < '3.9' pre-commit>=4.0.1; python_version >= '3.9' -r extras/yaml.txt From 514ebfcb316b1e0dc517febddd1d6febb7fb042a Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 21 Oct 2024 20:30:17 +0300 Subject: [PATCH 0886/1051] [pre-commit.ci] pre-commit autoupdate (#9369) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/mirrors-mypy: v1.11.2 → v1.12.1](https://github.com/pre-commit/mirrors-mypy/compare/v1.11.2...v1.12.1) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f5b61ccd17c..36e2f4d81d3 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -39,7 +39,7 @@ repos: - id: isort - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.11.2 + rev: v1.12.1 hooks: - id: mypy pass_filenames: false From 8f20f2fd2279e2387bfcfd7e30f4b38207dc24d6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 23 Oct 2024 02:27:10 +0300 Subject: [PATCH 0887/1051] Bump mypy from 1.12.1 to 1.13.0 (#9373) Bumps [mypy](https://github.com/python/mypy) from 1.12.1 to 1.13.0. - [Changelog](https://github.com/python/mypy/blob/master/CHANGELOG.md) - [Commits](https://github.com/python/mypy/compare/v1.12.1...v1.13.0) --- updated-dependencies: - dependency-name: mypy dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index e100132eb36..2eb5e7affc1 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -8,7 +8,7 @@ pytest-order==1.3.0 boto3>=1.26.143 moto>=4.1.11,<5.1.0 # typing extensions -mypy==1.12.1; platform_python_implementation=="CPython" +mypy==1.13.0; platform_python_implementation=="CPython" pre-commit>=3.5.0,<3.8.0; python_version < '3.9' pre-commit>=4.0.1; python_version >= '3.9' -r extras/yaml.txt From 706024358f0277f4fe716ae8a9a50a0f38eac64e Mon Sep 17 00:00:00 2001 From: Omer Katz Date: Mon, 28 Oct 2024 13:07:54 +0200 Subject: [PATCH 0888/1051] Pass timeout and confirm_timeout to producer.publish(). (#9374) --- celery/app/amqp.py | 4 +++- t/unit/app/test_amqp.py | 16 ++++++++++++++++ 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/celery/app/amqp.py b/celery/app/amqp.py index 575117d13e1..8dcec363053 100644 --- a/celery/app/amqp.py +++ b/celery/app/amqp.py @@ -467,7 +467,8 @@ def send_task_message(producer, name, message, retry=None, retry_policy=None, serializer=None, delivery_mode=None, compression=None, declare=None, - headers=None, exchange_type=None, **kwargs): + headers=None, exchange_type=None, + timeout=None, confirm_timeout=None, **kwargs): retry = default_retry if retry is None else retry headers2, properties, body, sent_event = message if headers: @@ -528,6 +529,7 @@ def send_task_message(producer, name, message, retry=retry, retry_policy=_rp, delivery_mode=delivery_mode, declare=declare, headers=headers2, + timeout=timeout, confirm_timeout=confirm_timeout, **properties ) if after_receivers: diff --git a/t/unit/app/test_amqp.py b/t/unit/app/test_amqp.py index 1293eb5d15e..4b46148d144 100644 --- a/t/unit/app/test_amqp.py +++ b/t/unit/app/test_amqp.py @@ -325,6 +325,22 @@ def test_send_task_message__with_delivery_mode(self): ) assert prod.publish.call_args[1]['delivery_mode'] == 33 + def test_send_task_message__with_timeout(self): + prod = Mock(name='producer') + self.app.amqp.send_task_message( + prod, 'foo', self.simple_message_no_sent_event, + timeout=1, + ) + assert prod.publish.call_args[1]['timeout'] == 1 + + def test_send_task_message__with_confirm_timeout(self): + prod = Mock(name='producer') + self.app.amqp.send_task_message( + prod, 'foo', self.simple_message_no_sent_event, + confirm_timeout=1, + ) + assert prod.publish.call_args[1]['confirm_timeout'] == 1 + def test_send_task_message__with_receivers(self): mocked_receiver = ((Mock(), Mock()), Mock()) with patch('celery.signals.task_sent.receivers', [mocked_receiver]): From 36bee9e48ead607a4b33bf1916fa3ee327f5c59e Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 28 Oct 2024 19:05:56 +0200 Subject: [PATCH 0889/1051] [pre-commit.ci] pre-commit autoupdate (#9379) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/asottile/pyupgrade: v3.18.0 → v3.19.0](https://github.com/asottile/pyupgrade/compare/v3.18.0...v3.19.0) - [github.com/pre-commit/mirrors-mypy: v1.12.1 → v1.13.0](https://github.com/pre-commit/mirrors-mypy/compare/v1.12.1...v1.13.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff 
--git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 36e2f4d81d3..779461c2657 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/asottile/pyupgrade - rev: v3.18.0 + rev: v3.19.0 hooks: - id: pyupgrade args: ["--py38-plus"] @@ -39,7 +39,7 @@ repos: - id: isort - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.12.1 + rev: v1.13.0 hooks: - id: mypy pass_filenames: false From fb39f230fb25d7ca885de533d66b373c363df1a5 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 29 Oct 2024 15:33:55 +0200 Subject: [PATCH 0890/1051] Bump Kombu to v5.5.0rc2 (#9382) --- requirements/default.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/default.txt b/requirements/default.txt index 0e640526579..bed03e2bd56 100644 --- a/requirements/default.txt +++ b/requirements/default.txt @@ -1,5 +1,5 @@ billiard>=4.2.1,<5.0 -kombu>=5.5.0rc1,<6.0 +kombu>=5.5.0rc2,<6.0 vine>=5.1.0,<6.0 click>=8.1.2,<9.0 click-didyoumean>=0.3.0 From 71519d80f2a5ce77055541324dd545a015dca7d8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 30 Oct 2024 15:31:42 +0200 Subject: [PATCH 0891/1051] Bump pytest-cov from 5.0.0 to 6.0.0 (#9388) * Bump pytest-cov from 5.0.0 to 6.0.0 Bumps [pytest-cov](https://github.com/pytest-dev/pytest-cov) from 5.0.0 to 6.0.0. - [Changelog](https://github.com/pytest-dev/pytest-cov/blob/master/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest-cov/compare/v5.0.0...v6.0.0) --- updated-dependencies: - dependency-name: pytest-cov dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] * Update requirements/test-ci-base.txt --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Tomer Nosrati --- requirements/test-ci-base.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/requirements/test-ci-base.txt b/requirements/test-ci-base.txt index 6238dd48914..05ee50df850 100644 --- a/requirements/test-ci-base.txt +++ b/requirements/test-ci-base.txt @@ -1,4 +1,5 @@ -pytest-cov==5.0.0 +pytest-cov==5.0.0; python_version<"3.9" +pytest-cov==6.0.0; python_version>="3.9" pytest-github-actions-annotate-failures==0.2.0 -r extras/redis.txt -r extras/sqlalchemy.txt From 8eecebbb9551449587be56ddcf13c4e2549774cb Mon Sep 17 00:00:00 2001 From: Mathias Ertl Date: Sat, 2 Nov 2024 01:21:43 +0200 Subject: [PATCH 0892/1051] default strict to False for pydantic tasks (#9393) * default strict to False for pydantic tasks * update docs --- celery/app/base.py | 2 +- docs/userguide/tasks.rst | 2 +- t/unit/app/test_app.py | 22 ++++++++++++++++++++++ 3 files changed, 24 insertions(+), 2 deletions(-) diff --git a/celery/app/base.py b/celery/app/base.py index 833818344de..7af07de9410 100644 --- a/celery/app/base.py +++ b/celery/app/base.py @@ -548,7 +548,7 @@ def _task_from_fun( base=None, bind=False, pydantic: bool = False, - pydantic_strict: bool = True, + pydantic_strict: bool = False, pydantic_context: typing.Optional[typing.Dict[str, typing.Any]] = None, pydantic_dump_kwargs: typing.Optional[typing.Dict[str, typing.Any]] = None, **options, diff --git a/docs/userguide/tasks.rst b/docs/userguide/tasks.rst index 505522b3cf5..60c5e89f259 100644 --- a/docs/userguide/tasks.rst +++ b/docs/userguide/tasks.rst @@ -893,7 +893,7 @@ There are a few more options influencing Pydantic behavior: .. 
attribute:: Task.pydantic_strict

    By default, `strict mode `_
-   is enabled. You can pass ``False`` to disable strict model validation.
+   is disabled. You can pass ``True`` to enable strict model validation.

 .. attribute:: Task.pydantic_context

diff --git a/t/unit/app/test_app.py b/t/unit/app/test_app.py
index 4bf1887b236..4d132a537d3 100644
--- a/t/unit/app/test_app.py
+++ b/t/unit/app/test_app.py
@@ -615,6 +615,28 @@ def foo(arg: ArgModel, kwarg: KwargModel = kwarg_default) -> ReturnModel:
             assert foo(arg={'arg_value': 5}, kwarg={'kwarg_value': 6}) == {'ret_value': 2}
             check.assert_called_once_with(ArgModel(arg_value=5), kwarg=KwargModel(kwarg_value=6))

+    def test_task_with_pydantic_with_non_strict_validation(self):
+        """Test a pydantic task where Pydantic has to apply non-strict validation."""
+
+        class Model(BaseModel):
+            value: timedelta
+
+        with self.Celery() as app:
+            check = Mock()
+
+            @app.task(pydantic=True)
+            def foo(arg: Model) -> Model:
+                check(arg)
+                return Model(value=timedelta(days=arg.value.days * 2))
+
+            assert foo({'value': timedelta(days=1)}) == {'value': 'P2D'}
+            check.assert_called_once_with(Model(value=timedelta(days=1)))
+            check.reset_mock()
+
+            # Pass a serialized value to the task
+            assert foo({'value': 'P3D'}) == {'value': 'P6D'}
+            check.assert_called_once_with(Model(value=timedelta(days=3)))
+
     def test_task_with_pydantic_with_optional_pydantic_args(self):
         """Test pydantic task receiving and returning an optional argument."""
         class ArgModel(BaseModel):

From 53fa7bc5fb460eab140296de45ab738b4cd67021 Mon Sep 17 00:00:00 2001
From: Omer Katz
Date: Sun, 3 Nov 2024 17:31:33 +0200
Subject: [PATCH 0893/1051] Only log that global QoS is disabled if using amqp. (#9395)

Fixes #9385

---
 celery/worker/consumer/tasks.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/celery/worker/consumer/tasks.py b/celery/worker/consumer/tasks.py
index 0be966755af..9f5d9408845 100644
--- a/celery/worker/consumer/tasks.py
+++ b/celery/worker/consumer/tasks.py
@@ -42,8 +42,6 @@ def start(self, c):
         c.update_strategies()

         qos_global = self.qos_global(c)
-        if qos_global is False:
-            logger.info("Global QoS is disabled. Prefetch count in now static.")

         # set initial prefetch count
         c.connection.default_channel.basic_qos(
@@ -95,6 +93,7 @@ def qos_global(self, c) -> bool:
             using_quorum_queues, qname = self.detect_quorum_queues(c)
             if using_quorum_queues:
                 qos_global = False
+                logger.info("Global QoS is disabled. Prefetch count in now static.")
                 # The ETA tasks mechanism requires additional work for Celery to fully support
                 # quorum queues. Warn the user that ETA tasks may not function as expected until
                 # this is done so we can at least support quorum queues partially for now.

From b468b91cbb84e3a0b84946607af5a90a58f0a643 Mon Sep 17 00:00:00 2001
From: Joe Zhou
Date: Wed, 6 Nov 2024 11:04:44 -0500
Subject: [PATCH 0894/1051] update sponsorship logo (#9398)

---
 README.rst                |   4 +-
 docs/images/dragonfly.png | Bin 14258 -> 0 bytes
 docs/images/dragonfly.svg |  89 ++++++++++++++++++++++++++++++++++++++
 3 files changed, 91 insertions(+), 2 deletions(-)
 delete mode 100644 docs/images/dragonfly.png
 create mode 100644 docs/images/dragonfly.svg

diff --git a/README.rst b/README.rst
index 6a55a8b4953..aa24b66953b 100644
--- a/README.rst
+++ b/README.rst
@@ -33,9 +33,9 @@ Sponsor
 `Dragonfly `_ is a drop-in Redis replacement that cuts costs and boosts performance.
Designed to fully utilize the power of modern cloud hardware and deliver on the data demands of modern applications, Dragonfly frees developers from the limits of traditional in-memory data stores.

-.. image:: https://github.com/celery/celery/raw/main/docs/images/dragonfly.png
+.. image:: https://github.com/celery/celery/raw/main/docs/images/dragonfly.svg
   :alt: Dragonfly logo
-  :width: 150px
+  :width: 150px

diff --git a/docs/images/dragonfly.png b/docs/images/dragonfly.png
deleted file mode 100644
index 54b9c4dd0a2a88dc56d4275abddac20c53327b81..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 14258
[14258 bytes of base85-encoded PNG data not reproduced here]

[docs/images/dragonfly.svg: new file, 89 lines of SVG markup not reproduced here]

From 80dff7dcdec5894b4cf3ac52bd1a01a98f08e7fc Mon Sep 17 00:00:00 2001
From: Moritz Schott
Date: Sun, 10 Nov 2024 18:46:36 +0100
Subject: [PATCH 0895/1051] Allow custom hostname for celery_worker in
 celery.contrib.pytest / celery.contrib.testing.worker (#9405)

* test(TestWorkController): add test to assert custom hostname can be supplied to the TestWorkController

* feat(TestWorkController): use user provided hostname argument for TestWorkController hostname

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
---
 celery/contrib/testing/worker.py |  2 +-
 t/unit/contrib/test_worker.py    | 16 ++++++++++++++--
 2 files changed, 15 insertions(+), 3 deletions(-)

diff --git a/celery/contrib/testing/worker.py b/celery/contrib/testing/worker.py
index b8d3fc06d87..cb418b8e87a 100644
--- a/celery/contrib/testing/worker.py
+++ b/celery/contrib/testing/worker.py
@@ -155,7 +155,7 @@ def _start_worker_thread(app: Celery,
     worker = WorkController(
         app=app,
         concurrency=concurrency,
-        hostname=anon_nodename(),
+        hostname=kwargs.pop("hostname", anon_nodename()),
         pool=pool,
         loglevel=loglevel,
         logfile=logfile,
diff --git a/t/unit/contrib/test_worker.py b/t/unit/contrib/test_worker.py
index e3ec8f9a8bf..4534317ae83 100644
--- a/t/unit/contrib/test_worker.py
+++ b/t/unit/contrib/test_worker.py
@@ -4,12 +4,12 @@
 # to install the celery.ping task that the test lib uses
 import celery.contrib.testing.tasks  # noqa
 from celery import Celery
-from celery.contrib.testing.worker import start_worker
+from celery.contrib.testing.worker import TestWorkController, start_worker


 class test_worker:
     def setup_method(self):
-        self.app = Celery('celerytest', backend='cache+memory://', broker='memory://',)
+        self.app = Celery('celerytest', backend='cache+memory://', broker='memory://', )

         @self.app.task
         def add(x, y):
@@ -45,3 +45,15 @@ def test_start_worker_with_exception(self): with start_worker(app=self.app, loglevel=0): result = self.error_task.apply_async() result.get(timeout=5) + + def test_start_worker_with_hostname_config(self): + """Make sure a custom hostname can be supplied to the TestWorkController""" + test_hostname = 'test_name@test_host' + with start_worker(app=self.app, loglevel=0, hostname=test_hostname) as w: + + assert isinstance(w, TestWorkController) + assert w.hostname == test_hostname + + result = self.add.s(1, 2).apply_async() + val = result.get(timeout=5) + assert val == 3 From 05f714707e75a1d11b09f022320e31fcc2d94dfd Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 12 Nov 2024 00:36:15 +0200 Subject: [PATCH 0896/1051] Removed docker-docs from CI (optional job, malfunctioning) (#9406) --- .github/workflows/docker.yml | 8 -------- 1 file changed, 8 deletions(-) diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 4587775abaf..4f04a34cc2c 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -33,14 +33,6 @@ jobs: - name: Build Docker container run: make docker-build - docker-docs: - runs-on: blacksmith-4vcpu-ubuntu-2204 - timeout-minutes: 5 - steps: - - uses: actions/checkout@v4 - - name: Build Documentation - run: make docker-docs - smoke-tests_dev: runs-on: blacksmith-4vcpu-ubuntu-2204 timeout-minutes: 10 From dfe241ecb2bb8f536ddab83b6e18a806029a227d Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 12 Nov 2024 18:25:48 +0200 Subject: [PATCH 0897/1051] Added a utility to format changelogs from the auto-generated GitHub release notes (#9408) * Added a utility to format changelogs from the auto-generated GitHub release notes * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- docs/Makefile | 14 +++- docs/changelog_formatter.py | 130 ++++++++++++++++++++++++++++++++++++ requirements/pkgutils.txt | 1 + 3 files changed, 144 insertions(+), 1 deletion(-) create mode 100755 docs/changelog_formatter.py diff --git a/docs/Makefile b/docs/Makefile index cfed0cb0fdf..f42e386e705 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -48,6 +48,7 @@ help: @echo " apicheck to verify that all modules are present in autodoc" @echo " configcheck to verify that all modules are present in autodoc" @echo " spelling to perform a spell check" + @echo " changelog to generate a changelog from GitHub auto-generated release notes" .PHONY: clean clean: @@ -237,4 +238,15 @@ pseudoxml: .PHONY: livehtml livehtml: - sphinx-autobuild -b html --host 0.0.0.0 --port 7000 --watch $(APP) -c . $(SOURCEDIR) $(BUILDDIR)/html \ No newline at end of file + sphinx-autobuild -b html --host 0.0.0.0 --port 7000 --watch $(APP) -c . $(SOURCEDIR) $(BUILDDIR)/html + +.PHONY: changelog +changelog: + @echo "Usage Instructions:" + @echo "1. Generate release notes using GitHub: https://github.com/celery/celery/releases/new" + @echo " - Copy everything that's generated to your clipboard." + @echo " - pre-commit lines will be removed automatically." + @echo "2. Run 'make -C docs changelog' from the root dir, to manually process the changes and output the formatted text." + @echo "" + @echo "Processing changelog from clipboard..." 
+ python ./changelog_formatter.py --clipboard diff --git a/docs/changelog_formatter.py b/docs/changelog_formatter.py new file mode 100755 index 00000000000..1d76ce88564 --- /dev/null +++ b/docs/changelog_formatter.py @@ -0,0 +1,130 @@ +#!/usr/bin/env python3 + +import re +import sys + +import click +import pyperclip +from colorama import Fore, init + +# Initialize colorama for color support in terminal +init(autoreset=True) + +# Regular expression pattern to match the required lines +PATTERN = re.compile(r"^\*\s*(.*?)\s+by\s+@[\w-]+\s+in\s+https://github\.com/[\w-]+/[\w-]+/pull/(\d+)") + + +def read_changes_file(filename): + try: + with open(filename) as f: + return f.readlines() + except FileNotFoundError: + print(f"Error: {filename} file not found.") + sys.exit(1) + + +def read_from_clipboard(): + text = pyperclip.paste() + return text.splitlines() + + +def process_line(line): + line = line.strip() + + # Skip lines containing '[pre-commit.ci]' + if "[pre-commit.ci]" in line: + return None + + # Skip lines starting with '## What's Changed' + if line.startswith("## What's Changed"): + return None + + # Stop processing if '## New Contributors' is encountered + if line.startswith("## New Contributors"): + return "STOP_PROCESSING" + + # Skip lines that don't start with '* ' + if not line.startswith("* "): + return None + + match = PATTERN.match(line) + if match: + description, pr_number = match.groups() + return f"- {description} (#{pr_number})" + return None + + +@click.command() +@click.option( + "--source", + "-s", + type=click.Path(exists=True), + help="Source file to read from. If not provided, reads from clipboard.", +) +@click.option( + "--dest", + "-d", + type=click.File("w"), + default="-", + help="Destination file to write to. Defaults to standard output.", +) +@click.option( + "--clipboard", + "-c", + is_flag=True, + help="Read input from clipboard explicitly.", +) +def main(source, dest, clipboard): + # Determine the source of input + if clipboard or (not source and not sys.stdin.isatty()): + # Read from clipboard + lines = read_from_clipboard() + elif source: + # Read from specified file + lines = read_changes_file(source) + else: + # Default: read from clipboard + lines = read_from_clipboard() + + output_lines = [] + for line in lines: + output_line = process_line(line) + if output_line == "STOP_PROCESSING": + break + if output_line: + output_lines.append(output_line) + + output_text = "\n".join(output_lines) + + # Prepare the header + version = "x.y.z" + underline = "=" * len(version) + + header = f""" +.. _version-{version}: + +{version} +{underline} + +:release-date: +:release-by: + +What's Changed +~~~~~~~~~~~~~~ +""" + + # Combine header and output + final_output = header + output_text + + # Write output to destination + if dest.name == "": + print(Fore.GREEN + "Copy the following text to Changelog.rst:") + print(Fore.YELLOW + header) + print(Fore.CYAN + output_text) + else: + dest.write(final_output + "\n") + dest.close() + + +if __name__ == "__main__": + main() diff --git a/requirements/pkgutils.txt b/requirements/pkgutils.txt index 652a4c801a5..fd180f53be3 100644 --- a/requirements/pkgutils.txt +++ b/requirements/pkgutils.txt @@ -9,3 +9,4 @@ sphinx2rst>=1.0 # Disable cyanide until it's fully updated. 
# cyanide>=1.0.1 bumpversion==0.6.0 +pyperclip==1.9.0 From 1e63b497642726b69b9e3e87747f9740322d14a1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 15 Nov 2024 01:28:00 +0200 Subject: [PATCH 0898/1051] Bump codecov/codecov-action from 4 to 5 (#9412) Bumps [codecov/codecov-action](https://github.com/codecov/codecov-action) from 4 to 5. - [Release notes](https://github.com/codecov/codecov-action/releases) - [Changelog](https://github.com/codecov/codecov-action/blob/main/CHANGELOG.md) - [Commits](https://github.com/codecov/codecov-action/compare/v4...v5) --- updated-dependencies: - dependency-name: codecov/codecov-action dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/python-package.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 6a5124ee59a..794788269fd 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -70,7 +70,7 @@ jobs: run: | tox --verbose --verbose - - uses: codecov/codecov-action@v4 + - uses: codecov/codecov-action@v5 with: flags: unittests # optional fail_ci_if_error: true # optional (default = false) From fadc1ae6cc2f9af799f65f752c5a79980f3e725f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 15 Nov 2024 14:53:01 +0200 Subject: [PATCH 0899/1051] Update elasticsearch requirement from <=8.15.1 to <=8.16.0 (#9410) Updates the requirements on [elasticsearch](https://github.com/elastic/elasticsearch-py) to permit the latest version. - [Release notes](https://github.com/elastic/elasticsearch-py/releases) - [Commits](https://github.com/elastic/elasticsearch-py/compare/0.4.1...v8.16.0) --- updated-dependencies: - dependency-name: elasticsearch dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/extras/elasticsearch.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/elasticsearch.txt b/requirements/extras/elasticsearch.txt index 024d7624268..269031ec71e 100644 --- a/requirements/extras/elasticsearch.txt +++ b/requirements/extras/elasticsearch.txt @@ -1,2 +1,2 @@ -elasticsearch<=8.15.1 +elasticsearch<=8.16.0 elastic-transport<=8.15.1 From fa5d7ff09c93516c9d5712351f56db3d22876395 Mon Sep 17 00:00:00 2001 From: Omer Katz Date: Sun, 17 Nov 2024 17:24:04 +0200 Subject: [PATCH 0900/1051] Native Delayed Delivery in RabbitMQ (#9207) * Add the DelayedDelivery bootstep. * Comment POC code only * Add feature flag. * Add coverage for include_if. * Remove POC only code. * Added unit tests for delayed delivery implementation. * Autopep8. * Fix import sorting. * Add coverage for case when native delayed delivery is enabled but no eta or countdown were provided * formatting fix. * Add coverage for delayed delivery. * Formatting fix. * Adjust warning and disable qos global when using native delayed delivery. * Added basic smoke tests for native delayed delivery. * Added smoke tests that test the configuration of the native delayed delivery queues and exchanges. 
* Add condition for not handling countdowns in the past and direct exchanges * Add warning when native delayed delivery is enabled but the exchange is a direct exchange. * Fixed a bug where the delayed message would be published to celery_delayed_27 as well as the right queue. * Add broker_native_delayed_delivery setting to documentation. * Add title. * Added documentation for the broker_native_delayed_delivery setting. * Added the broker_native_delayed_delivery_queue_type setting. * Document quorum queues and limitations. * Add documentation regarding native delayed delivery. * Mention that confirmed publish must be set to true. * Cover both values of broker_native_delayed_delivery_queue_type in smoke tests. * Revert usage of broker_native_delayed_delivery_queue_type. * logger.warn is deprecated * Fix include_if condition to take failover into consideration. * Fix smoke tests. * Revert "Revert usage of broker_native_delayed_delivery_queue_type." This reverts commit ce3156005254a8576792bb23d377f261bebc6ca2. * Apply x-dead-letter-strategy only on quorum queues. * Fix unit tests. * Use kombu native delayed delivery API. * Add documentation. * Delayed native delivery queues can now be non-quorum queues. * Declare native delayed delivery queues on failover brokers as well. * Fix unit tests. * Use connection to check for the transport type. * Add versionadded to the documentation. * Add link to quorum queues migration guide. * Fix failover when connection is refused. * Change native delayed delivery queue type default to quorum. * Remove warning. * Use native delayed delivery automatically when quorum queues are detected. * Remove the broker_native_delayed_delivery configuration setting. * Use fixtures and extract common test code. * Adjust documentation. 
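
A rough sketch of the delay encoding this mechanism relies on (illustration only: kombu's ``calculate_routing_key`` is the actual implementation, and the helper name and 28-level depth below are assumptions matching the smoke and unit tests in this patch):

.. code-block:: python

    # Illustration only: approximates kombu's calculate_routing_key.
    # Bit i of the countdown decides whether the message passes through
    # the celery_delayed_<i> queue, whose per-message TTL is 2**i seconds;
    # celery_delayed_27 is the topmost level.
    def sketch_routing_key(countdown: int, routing_key: str) -> str:
        bits = format(countdown, "028b")  # highest delay level first
        return ".".join(bits) + "." + routing_key

    # countdown=30 (0b11110) passes through levels 4..1 and skips level 0:
    assert sketch_routing_key(30, "testcelery") == (
        "0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.1.1.1.1.0.testcelery"
    )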
--- celery/app/base.py | 35 +++- celery/app/defaults.py | 1 + celery/backends/dynamodb.py | 2 +- celery/backends/elasticsearch.py | 2 +- celery/bin/base.py | 1 + celery/utils/quorum_queues.py | 20 ++ celery/worker/consumer/consumer.py | 5 +- celery/worker/consumer/delayed_delivery.py | 37 ++++ celery/worker/consumer/tasks.py | 40 +--- .../backends-and-brokers/rabbitmq.rst | 64 ++++++ docs/userguide/configuration.rst | 25 ++- t/integration/tasks.py | 1 + .../test_native_delayed_delivery.py | 150 ++++++++++++++ t/unit/app/test_app.py | 184 +++++++++++++++++- t/unit/app/test_backends.py | 4 +- t/unit/tasks/test_tasks.py | 1 + t/unit/worker/test_consumer.py | 22 +-- t/unit/worker/test_native_delayed_delivery.py | 74 +++++++ 18 files changed, 599 insertions(+), 69 deletions(-) create mode 100644 celery/utils/quorum_queues.py create mode 100644 celery/worker/consumer/delayed_delivery.py create mode 100644 t/smoke/tests/quorum_queues/test_native_delayed_delivery.py create mode 100644 t/unit/worker/test_native_delayed_delivery.py diff --git a/celery/app/base.py b/celery/app/base.py index 7af07de9410..27b0421763c 100644 --- a/celery/app/base.py +++ b/celery/app/base.py @@ -14,9 +14,10 @@ from click.exceptions import Exit from dateutil.parser import isoparse -from kombu import pools +from kombu import Exchange, pools from kombu.clocks import LamportClock from kombu.common import oid_from +from kombu.transport.native_delayed_delivery import calculate_routing_key from kombu.utils.compat import register_after_fork from kombu.utils.objects import cached_property from kombu.utils.uuid import uuid @@ -38,6 +39,7 @@ from celery.utils.time import maybe_make_aware, timezone, to_utc from ..utils.annotations import annotation_is_class, annotation_issubclass, get_optional_arg +from ..utils.quorum_queues import detect_quorum_queues # Load all builtin tasks from . import backends, builtins # noqa from .annotations import prepare as prepare_annotations @@ -513,6 +515,7 @@ def _create_task_cls(fun): if shared: def cons(app): return app._task_from_fun(fun, **opts) + cons.__name__ = fun.__name__ connect_on_app_finalize(cons) if not lazy or self.finalized: @@ -828,6 +831,33 @@ def send_task(self, name, args=None, kwargs=None, countdown=None, ignore_result = options.pop('ignore_result', False) options = router.route( options, route_name or name, args, kwargs, task_type) + + is_native_delayed_delivery = detect_quorum_queues(self, + self.producer_pool.connections.connection.transport_cls)[0] + if is_native_delayed_delivery and options['queue'].exchange.type != 'direct': + if eta: + if isinstance(eta, str): + eta = isoparse(eta) + countdown = (maybe_make_aware(eta) - self.now()).total_seconds() + + if countdown: + if countdown > 0: + routing_key = calculate_routing_key(int(countdown), options["queue"].routing_key) + exchange = Exchange( + 'celery_delayed_27', + type='topic', + ) + del options['queue'] + options['routing_key'] = routing_key + options['exchange'] = exchange + elif is_native_delayed_delivery and options['queue'].exchange.type == 'direct': + logger.warning( + 'Direct exchanges are not supported with native delayed delivery.\n' + f'{options["queue"].exchange.name} is a direct exchange but should be a topic exchange or ' + 'a fanout exchange in order for native delayed delivery to work properly.\n' + 'If quorum queues are used, this task may block the worker process until the ETA arrives.' 
+ ) + if expires is not None: if isinstance(expires, datetime): expires_s = (maybe_make_aware( @@ -988,6 +1018,7 @@ def _connection(self, url, userid=None, password=None, 'broker_connection_timeout', connect_timeout ), ) + broker_connection = connection def _acquire_connection(self, pool=True): @@ -1007,6 +1038,7 @@ def connection_or_acquire(self, connection=None, pool=True, *_, **__): will be acquired from the connection pool. """ return FallbackContext(connection, self._acquire_connection, pool=pool) + default_connection = connection_or_acquire # XXX compat def producer_or_acquire(self, producer=None): @@ -1022,6 +1054,7 @@ def producer_or_acquire(self, producer=None): return FallbackContext( producer, self.producer_pool.acquire, block=True, ) + default_producer = producer_or_acquire # XXX compat def prepare_config(self, c): diff --git a/celery/app/defaults.py b/celery/app/defaults.py index 04bc1927944..f8e2511fd01 100644 --- a/celery/app/defaults.py +++ b/celery/app/defaults.py @@ -95,6 +95,7 @@ def __repr__(self): heartbeat=Option(120, type='int'), heartbeat_checkrate=Option(3.0, type='int'), login_method=Option(None, type='string'), + native_delayed_delivery_queue_type=Option(default='quorum', type='string'), pool_limit=Option(10, type='int'), use_ssl=Option(False, type='bool'), diff --git a/celery/backends/dynamodb.py b/celery/backends/dynamodb.py index d5159353b00..0423a468014 100644 --- a/celery/backends/dynamodb.py +++ b/celery/backends/dynamodb.py @@ -504,7 +504,7 @@ def _prepare_inc_count_request(self, key: str) -> Dict[str, Any]: "ExpressionAttributeValues": { ":num": {"N": "1"}, }, - "ReturnValues" : "UPDATED_NEW", + "ReturnValues": "UPDATED_NEW", } def _item_to_dict(self, raw_response): diff --git a/celery/backends/elasticsearch.py b/celery/backends/elasticsearch.py index a97869bef52..9e6f2655639 100644 --- a/celery/backends/elasticsearch.py +++ b/celery/backends/elasticsearch.py @@ -97,7 +97,7 @@ def exception_safe_to_retry(self, exc): # N/A: Low level exception (i.e. socket exception) if exc.status_code in {401, 409, 500, 502, 504, 'N/A'}: return True - if isinstance(exc , elasticsearch.exceptions.TransportError): + if isinstance(exc, elasticsearch.exceptions.TransportError): return True return False diff --git a/celery/bin/base.py b/celery/bin/base.py index 073b86a7e91..61cc37a0291 100644 --- a/celery/bin/base.py +++ b/celery/bin/base.py @@ -173,6 +173,7 @@ def format_options(self, ctx, formatter): class DaemonOption(CeleryOption): """Common daemonization option""" + def __init__(self, *args, **kwargs): super().__init__(args, help_group=kwargs.pop("help_group", "Daemonization Options"), diff --git a/celery/utils/quorum_queues.py b/celery/utils/quorum_queues.py new file mode 100644 index 00000000000..0eb058fa6b2 --- /dev/null +++ b/celery/utils/quorum_queues.py @@ -0,0 +1,20 @@ +from __future__ import annotations + + +def detect_quorum_queues(app, driver_type: str) -> tuple[bool, str]: + """Detect if any of the queues are quorum queues. + + Returns: + tuple[bool, str]: A tuple containing a boolean indicating if any of the queues are quorum queues + and the name of the first quorum queue found or an empty string if no quorum queues were found. 
+ """ + is_rabbitmq_broker = driver_type == 'amqp' + + if is_rabbitmq_broker: + queues = app.amqp.queues + for qname in queues: + qarguments = queues[qname].queue_arguments or {} + if qarguments.get("x-queue-type") == "quorum": + return True, qname + + return False, "" diff --git a/celery/worker/consumer/consumer.py b/celery/worker/consumer/consumer.py index d1b38232c6e..a66f5443872 100644 --- a/celery/worker/consumer/consumer.py +++ b/celery/worker/consumer/consumer.py @@ -169,6 +169,7 @@ class Blueprint(bootsteps.Blueprint): 'celery.worker.consumer.heart:Heart', 'celery.worker.consumer.control:Control', 'celery.worker.consumer.tasks:Tasks', + 'celery.worker.consumer.delayed_delivery:DelayedDelivery', 'celery.worker.consumer.consumer:Evloop', 'celery.worker.consumer.agent:Agent', ] @@ -477,9 +478,9 @@ def connection_for_read(self, heartbeat=None): return self.ensure_connected( self.app.connection_for_read(heartbeat=heartbeat)) - def connection_for_write(self, heartbeat=None): + def connection_for_write(self, url=None, heartbeat=None): return self.ensure_connected( - self.app.connection_for_write(heartbeat=heartbeat)) + self.app.connection_for_write(url=url, heartbeat=heartbeat)) def ensure_connected(self, conn): # Callback called for each retry while the connection diff --git a/celery/worker/consumer/delayed_delivery.py b/celery/worker/consumer/delayed_delivery.py new file mode 100644 index 00000000000..d6672d32f5e --- /dev/null +++ b/celery/worker/consumer/delayed_delivery.py @@ -0,0 +1,37 @@ +from kombu.transport.native_delayed_delivery import (bind_queue_to_native_delayed_delivery_exchange, + declare_native_delayed_delivery_exchanges_and_queues) + +from celery import Celery, bootsteps +from celery.utils.log import get_logger +from celery.utils.quorum_queues import detect_quorum_queues +from celery.worker.consumer import Consumer, Tasks + +__all__ = ('DelayedDelivery',) + +logger = get_logger(__name__) + + +class DelayedDelivery(bootsteps.StartStopStep): + """This bootstep declares native delayed delivery queues and exchanges and binds all queues to them""" + requires = (Tasks,) + + def include_if(self, c): + return detect_quorum_queues(c.app, c.app.connection_for_write().transport.driver_type)[0] + + def start(self, c: Consumer): + app: Celery = c.app + + for broker_url in app.conf.broker_url.split(';'): + try: + # We use connection for write directly to avoid using ensure_connection() + connection = c.app.connection_for_write(url=broker_url) + declare_native_delayed_delivery_exchanges_and_queues( + connection, + app.conf.broker_native_delayed_delivery_queue_type + ) + + for queue in app.amqp.queues.values(): + bind_queue_to_native_delayed_delivery_exchange(connection, queue) + except ConnectionRefusedError: + # We may receive this error if a fail-over occurs + continue diff --git a/celery/worker/consumer/tasks.py b/celery/worker/consumer/tasks.py index 9f5d9408845..67cbfc1207f 100644 --- a/celery/worker/consumer/tasks.py +++ b/celery/worker/consumer/tasks.py @@ -2,32 +2,21 @@ from __future__ import annotations -import warnings - from kombu.common import QoS, ignore_errors from celery import bootsteps -from celery.exceptions import CeleryWarning from celery.utils.log import get_logger +from celery.utils.quorum_queues import detect_quorum_queues from .mingle import Mingle __all__ = ('Tasks',) + logger = get_logger(__name__) debug = logger.debug -ETA_TASKS_NO_GLOBAL_QOS_WARNING = """ -Detected quorum queue "%r", disabling global QoS. 
-With global QoS disabled, ETA tasks may not function as expected. Instead of adjusting
-the prefetch count dynamically, ETA tasks will occupy the prefetch buffer, potentially
-blocking other tasks from being consumed. To mitigate this, either set a high prefetch
-count or avoid using quorum queues until the ETA mechanism is updated to support a
-disabled global QoS, which is required for quorum queues.
-"""
-
-
 class Tasks(bootsteps.StartStopStep):
     """Bootstep starting the task message consumer."""

@@ -90,31 +79,10 @@ def qos_global(self, c) -> bool:
         qos_global = not c.connection.qos_semantics_matches_spec

         if c.app.conf.worker_detect_quorum_queues:
-            using_quorum_queues, qname = self.detect_quorum_queues(c)
+            using_quorum_queues, qname = detect_quorum_queues(c.app, c.connection.transport.driver_type)
+
             if using_quorum_queues:
                 qos_global = False
                 logger.info("Global QoS is disabled. Prefetch count is now static.")
-                # The ETA tasks mechanism requires additional work for Celery to fully support
-                # quorum queues. Warn the user that ETA tasks may not function as expected until
-                # this is done so we can at least support quorum queues partially for now.
-                warnings.warn(ETA_TASKS_NO_GLOBAL_QOS_WARNING % (qname,), CeleryWarning)

         return qos_global
-
-    def detect_quorum_queues(self, c) -> tuple[bool, str]:
-        """Detect if any of the queues are quorum queues.
-
-        Returns:
-            tuple[bool, str]: A tuple containing a boolean indicating if any of the queues are quorum queues
-                and the name of the first quorum queue found or an empty string if no quorum queues were found.
-        """
-        is_rabbitmq_broker = c.connection.transport.driver_type == 'amqp'
-
-        if is_rabbitmq_broker:
-            queues = c.app.amqp.queues
-            for qname in queues:
-                qarguments = queues[qname].queue_arguments or {}
-                if qarguments.get("x-queue-type") == "quorum":
-                    return True, qname
-
-        return False, ""
diff --git a/docs/getting-started/backends-and-brokers/rabbitmq.rst b/docs/getting-started/backends-and-brokers/rabbitmq.rst
index a7f1bfbaba4..2c6d14ab640 100644
--- a/docs/getting-started/backends-and-brokers/rabbitmq.rst
+++ b/docs/getting-started/backends-and-brokers/rabbitmq.rst
@@ -172,3 +172,67 @@ but rather use the :command:`rabbitmqctl` command:
     $ sudo rabbitmqctl stop

 When the server is running, you can continue reading `Setting up RabbitMQ`_.
+
+.. _using-quorum-queues:
+
+Using Quorum Queues
+===================
+
+.. versionadded:: 5.5
+
+.. warning::
+
+    Quorum Queues require disabling global QoS, which means some features won't work as expected.
+    See `limitations`_ for details.
+
+Celery supports `Quorum Queues`_ by setting the ``x-queue-type`` header to ``quorum`` like so:
+
+.. code-block:: python
+
+    from kombu import Queue
+
+    task_queues = [Queue('my-queue', queue_arguments={'x-queue-type': 'quorum'})]
+    broker_transport_options = {"confirm_publish": True}
+
+If you'd like to change the type of the default queue, set the :setting:`task_default_queue_type` setting to ``quorum``.
+
+Celery automatically detects whether quorum queues are in use via the :setting:`worker_detect_quorum_queues` setting.
+We recommend keeping the default behavior turned on.
+
+To migrate from classic mirrored queues to quorum queues, please refer to RabbitMQ's `documentation `_ on the subject.
+
+.. _`Quorum Queues`: https://www.rabbitmq.com/docs/quorum-queues
+
+.. _limitations:
+
+Limitations
+-----------
+
+Disabling global QoS means that the per-channel QoS is now static.
+This means that some Celery features won't work when using Quorum Queues.
+
+Autoscaling relies on increasing and decreasing the prefetch count whenever a new process is instantiated
+or terminated, so it won't work when Quorum Queues are detected.
+
+Similarly, the :setting:`worker_enable_prefetch_count_reduction` setting will be a no-op even when set to ``True``
+when Quorum Queues are detected.
+
+In addition, :ref:`ETA/Countdown ` will block the worker from the moment a task is received until the ETA arrives, since
+we can no longer increase the prefetch count and fetch another task from the queue.
+
+In order to properly schedule ETA/Countdown tasks, we automatically detect if quorum queues are used,
+and if they are, Celery automatically enables :ref:`Native Delayed Delivery `.
+
+.. _native-delayed-delivery:
+
+Native Delayed Delivery
+-----------------------
+
+Since tasks with ETA/Countdown will block the worker until they are scheduled for execution,
+we need to use RabbitMQ's native capabilities to schedule the execution of tasks.
+
+The design is borrowed from NServiceBus. If you are interested in the implementation details, refer to their `documentation`_.
+
+.. _documentation: https://docs.particular.net/transports/rabbitmq/delayed-delivery
+
+Native Delayed Delivery is automatically enabled when quorum queues are detected.
diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst
index 391dc35c8b9..ef0dc811701 100644
--- a/docs/userguide/configuration.rst
+++ b/docs/userguide/configuration.rst
@@ -79,6 +79,7 @@ have been moved into a new ``task_`` prefix.
 ``BROKER_FAILOVER_STRATEGY``                  :setting:`broker_failover_strategy`
 ``BROKER_HEARTBEAT``                          :setting:`broker_heartbeat`
 ``BROKER_LOGIN_METHOD``                       :setting:`broker_login_method`
+``BROKER_NATIVE_DELAYED_DELIVERY_QUEUE_TYPE`` :setting:`broker_native_delayed_delivery_queue_type`
 ``BROKER_POOL_LIMIT``                         :setting:`broker_pool_limit`
 ``BROKER_USE_SSL``                            :setting:`broker_use_ssl`
 ``CELERY_CACHE_BACKEND``                      :setting:`cache_backend`
@@ -2654,14 +2655,6 @@ queue argument.

 If the :setting:`worker_detect_quorum_queues` setting is enabled, the worker will
 automatically detect the queue type and disable the global QoS accordingly.

-.. warning::
-
-    When using quorum queues, ETA tasks may not function as expected. Instead of adjusting
-    the prefetch count dynamically, ETA tasks will occupy the prefetch buffer, potentially
-    blocking other tasks from being consumed. To mitigate this, either set a high prefetch
-    count or avoid using quorum queues until the ETA mechanism is updated to support a
-    disabled global QoS, which is required for quorum queues.
-
 .. warning::

     Quorum queues require confirm publish to be enabled.
@@ -3004,6 +2997,22 @@ Default: ``"AMQPLAIN"``.

 Set custom amqp login method.

+.. setting:: broker_native_delayed_delivery_queue_type
+
+``broker_native_delayed_delivery_queue_type``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. versionadded:: 5.5
+
+:transports supported: ``pyamqp``
+
+Default: ``"quorum"``.
+
+This setting allows changing the default queue type for the
+native delayed delivery queues. The other viable option is ``"classic"``, which
+is only supported by RabbitMQ and sets the queue type to ``classic`` using the ``x-queue-type``
+queue argument.
+
..
setting:: broker_transport_options

``broker_transport_options``
diff --git a/t/integration/tasks.py b/t/integration/tasks.py
index 227e3cb2917..031c89e002e 100644
--- a/t/integration/tasks.py
+++ b/t/integration/tasks.py
@@ -344,6 +344,7 @@ def __hash__(self):

 class UnpickleableException(Exception):
     """Exception that doesn't survive a pickling roundtrip (dump + load)."""
+
     def __init__(self, foo, bar=None):
         if bar is None:
             # We define bar with a default value in the signature so that
diff --git a/t/smoke/tests/quorum_queues/test_native_delayed_delivery.py b/t/smoke/tests/quorum_queues/test_native_delayed_delivery.py
new file mode 100644
index 00000000000..904b7047287
--- /dev/null
+++ b/t/smoke/tests/quorum_queues/test_native_delayed_delivery.py
@@ -0,0 +1,150 @@
+from datetime import datetime, timedelta
+from datetime import timezone as datetime_timezone
+
+import pytest
+import requests
+
+from pytest_celery import CeleryTestSetup
+from requests.auth import HTTPBasicAuth
+
+from celery import Celery
+from t.smoke.tasks import noop
+from t.smoke.tests.quorum_queues.conftest import RabbitMQManagementBroker
+
+
+@pytest.fixture
+def queues(celery_setup: CeleryTestSetup) -> list:
+    broker: RabbitMQManagementBroker = celery_setup.broker
+    api = broker.get_management_url() + "/api/queues"
+    response = requests.get(api, auth=HTTPBasicAuth("guest", "guest"))
+    assert response.status_code == 200
+
+    queues = response.json()
+    assert isinstance(queues, list)
+
+    return queues
+
+
+@pytest.fixture
+def exchanges(celery_setup: CeleryTestSetup) -> list:
+    broker: RabbitMQManagementBroker = celery_setup.broker
+    api = broker.get_management_url() + "/api/exchanges"
+    response = requests.get(api, auth=HTTPBasicAuth("guest", "guest"))
+    assert response.status_code == 200
+
+    exchanges = response.json()
+    assert isinstance(exchanges, list)
+
+    return exchanges
+
+
+def queue_configuration_test_helper(celery_setup, queues):
+    res = [queue for queue in queues if queue["name"].startswith('celery_delayed')]
+    assert len(res) == 28
+    for queue in res:
+        queue_level = int(queue["name"].split("_")[-1])
+
+        queue_arguments = queue["arguments"]
+        if queue_level == 0:
+            assert queue_arguments["x-dead-letter-exchange"] == "celery_delayed_delivery"
+        else:
+            assert queue_arguments["x-dead-letter-exchange"] == f"celery_delayed_{queue_level - 1}"
+
+        assert queue_arguments["x-message-ttl"] == pow(2, queue_level) * 1000
+
+        conf = celery_setup.app.conf
+        assert queue_arguments["x-queue-type"] == conf.broker_native_delayed_delivery_queue_type
+
+
+def exchange_configuration_test_helper(exchanges):
+    res = [exchange for exchange in exchanges if exchange["name"].startswith('celery_delayed')]
+    assert len(res) == 29
+    for exchange in res:
+        assert exchange["type"] == "topic"
+
+
+class test_broker_configuration_quorum:
+    @pytest.fixture
+    def default_worker_app(self, default_worker_app: Celery) -> Celery:
+        app = default_worker_app
+        app.conf.broker_transport_options = {"confirm_publish": True}
+        app.conf.task_default_queue_type = "quorum"
+        app.conf.broker_native_delayed_delivery_queue_type = 'quorum'
+        app.conf.task_default_exchange_type = 'topic'
+        app.conf.task_default_routing_key = 'celery'
+
+        return app
+
+    def test_native_delayed_delivery_queue_configuration(
+        self,
+        queues: list,
+        celery_setup: CeleryTestSetup
+    ):
+        queue_configuration_test_helper(celery_setup, queues)
+
+    def test_native_delayed_delivery_exchange_configuration(self, exchanges: list, celery_setup: 
CeleryTestSetup): + exchange_configuration_test_helper(exchanges) + + +class test_broker_configuration_classic: + @pytest.fixture + def default_worker_app(self, default_worker_app: Celery) -> Celery: + app = default_worker_app + app.conf.broker_transport_options = {"confirm_publish": True} + app.conf.task_default_queue_type = "quorum" + app.conf.broker_native_delayed_delivery_queue_type = 'classic' + app.conf.task_default_exchange_type = 'topic' + app.conf.task_default_routing_key = 'celery' + + return app + + def test_native_delayed_delivery_queue_configuration( + self, + queues: list, + celery_setup: CeleryTestSetup, + default_worker_app: Celery + ): + queue_configuration_test_helper(celery_setup, queues) + + def test_native_delayed_delivery_exchange_configuration(self, exchanges: list, celery_setup: CeleryTestSetup): + exchange_configuration_test_helper(exchanges) + + +class test_native_delayed_delivery: + @pytest.fixture + def default_worker_app(self, default_worker_app: Celery) -> Celery: + app = default_worker_app + app.conf.broker_transport_options = {"confirm_publish": True} + app.conf.task_default_queue_type = "quorum" + app.conf.task_default_exchange_type = 'topic' + app.conf.task_default_routing_key = 'celery' + + return app + + def test_countdown(self, celery_setup: CeleryTestSetup): + s = noop.s().set(queue=celery_setup.worker.worker_queue) + + result = s.apply_async(countdown=5) + + result.get(timeout=10) + + def test_eta(self, celery_setup: CeleryTestSetup): + s = noop.s().set(queue=celery_setup.worker.worker_queue) + + result = s.apply_async(eta=datetime.now(datetime_timezone.utc) + timedelta(0, 5)) + + result.get(timeout=10) + + def test_eta_str(self, celery_setup: CeleryTestSetup): + s = noop.s().set(queue=celery_setup.worker.worker_queue) + + result = s.apply_async(eta=(datetime.now(datetime_timezone.utc) + timedelta(0, 5)).isoformat()) + + result.get(timeout=10) + + def test_eta_in_the_past(self, celery_setup: CeleryTestSetup): + s = noop.s().set(queue=celery_setup.worker.worker_queue) + + result = s.apply_async(eta=(datetime.now(datetime_timezone.utc) - timedelta(0, 5)).isoformat()) + + result.get(timeout=10) diff --git a/t/unit/app/test_app.py b/t/unit/app/test_app.py index 4d132a537d3..479a418cf67 100644 --- a/t/unit/app/test_app.py +++ b/t/unit/app/test_app.py @@ -9,11 +9,13 @@ from copy import deepcopy from datetime import datetime, timedelta from datetime import timezone as datetime_timezone +from logging import LogRecord from pickle import dumps, loads from typing import Optional -from unittest.mock import DEFAULT, Mock, patch +from unittest.mock import ANY, DEFAULT, MagicMock, Mock, patch import pytest +from kombu import Exchange, Queue from pydantic import BaseModel, ValidationInfo, model_validator from vine import promise @@ -1270,7 +1272,8 @@ def test_pool_no_multiprocessing(self, mask_modules): def test_bugreport(self): assert self.app.bugreport() - def test_send_task__connection_provided(self): + @patch('celery.app.base.detect_quorum_queues', return_value=[False, ""]) + def test_send_task__connection_provided(self, detect_quorum_queues): connection = Mock(name='connection') router = Mock(name='router') router.route.return_value = {} @@ -1421,6 +1424,183 @@ def test_send_task_expire_as_string(self): except TypeError as e: pytest.fail(f'raise unexcepted error {e}') + @patch('celery.app.base.detect_quorum_queues', return_value=[True, "testcelery"]) + def test_native_delayed_delivery_countdown(self, detect_quorum_queues): + self.app.amqp = 
MagicMock(name='amqp') + self.app.amqp.router.route.return_value = { + 'queue': Queue( + 'testcelery', + routing_key='testcelery', + exchange=Exchange('testcelery', type='topic') + ) + } + + self.app.send_task('foo', (1, 2), countdown=30) + + exchange = Exchange( + 'celery_delayed_27', + type='topic', + ) + self.app.amqp.send_task_message.assert_called_once_with( + ANY, + ANY, + ANY, + exchange=exchange, + routing_key='0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.1.1.1.1.0.testcelery' + ) + + @patch('celery.app.base.detect_quorum_queues', return_value=[True, "testcelery"]) + def test_native_delayed_delivery_eta_datetime(self, detect_quorum_queues): + self.app.amqp = MagicMock(name='amqp') + self.app.amqp.router.route.return_value = { + 'queue': Queue( + 'testcelery', + routing_key='testcelery', + exchange=Exchange('testcelery', type='topic') + ) + } + self.app.now = Mock(return_value=datetime(2024, 8, 24, tzinfo=datetime_timezone.utc)) + + self.app.send_task('foo', (1, 2), eta=datetime(2024, 8, 25)) + + exchange = Exchange( + 'celery_delayed_27', + type='topic', + ) + self.app.amqp.send_task_message.assert_called_once_with( + ANY, + ANY, + ANY, + exchange=exchange, + routing_key='0.0.0.0.0.0.0.0.0.0.0.1.0.1.0.1.0.0.0.1.1.0.0.0.0.0.0.0.testcelery' + ) + + @patch('celery.app.base.detect_quorum_queues', return_value=[True, "testcelery"]) + def test_native_delayed_delivery_eta_str(self, detect_quorum_queues): + self.app.amqp = MagicMock(name='amqp') + self.app.amqp.router.route.return_value = { + 'queue': Queue( + 'testcelery', + routing_key='testcelery', + exchange=Exchange('testcelery', type='topic') + ) + } + self.app.now = Mock(return_value=datetime(2024, 8, 24, tzinfo=datetime_timezone.utc)) + + self.app.send_task('foo', (1, 2), eta=datetime(2024, 8, 25).isoformat()) + + exchange = Exchange( + 'celery_delayed_27', + type='topic', + ) + self.app.amqp.send_task_message.assert_called_once_with( + ANY, + ANY, + ANY, + exchange=exchange, + routing_key='0.0.0.0.0.0.0.0.0.0.0.1.0.1.0.1.0.0.0.1.1.0.0.0.0.0.0.0.testcelery', + ) + + @patch('celery.app.base.detect_quorum_queues', return_value=[True, "testcelery"]) + def test_native_delayed_delivery_no_eta_or_countdown(self, detect_quorum_queues): + self.app.amqp = MagicMock(name='amqp') + self.app.amqp.router.route.return_value = {'queue': Queue('testcelery', routing_key='testcelery')} + + self.app.send_task('foo', (1, 2), countdown=-10) + + self.app.amqp.send_task_message.assert_called_once_with( + ANY, + ANY, + ANY, + queue=Queue( + 'testcelery', + routing_key='testcelery' + ) + ) + + @patch('celery.app.base.detect_quorum_queues', return_value=[True, "testcelery"]) + def test_native_delayed_delivery_countdown_in_the_past(self, detect_quorum_queues): + self.app.amqp = MagicMock(name='amqp') + self.app.amqp.router.route.return_value = { + 'queue': Queue( + 'testcelery', + routing_key='testcelery', + exchange=Exchange('testcelery', type='topic') + ) + } + + self.app.send_task('foo', (1, 2)) + + self.app.amqp.send_task_message.assert_called_once_with( + ANY, + ANY, + ANY, + queue=Queue( + 'testcelery', + routing_key='testcelery', + exchange=Exchange('testcelery', type='topic') + ) + ) + + @patch('celery.app.base.detect_quorum_queues', return_value=[True, "testcelery"]) + def test_native_delayed_delivery_eta_in_the_past(self, detect_quorum_queues): + self.app.amqp = MagicMock(name='amqp') + self.app.amqp.router.route.return_value = { + 'queue': Queue( + 'testcelery', + routing_key='testcelery', + exchange=Exchange('testcelery', type='topic') + ) + } + 
self.app.now = Mock(return_value=datetime(2024, 8, 24, tzinfo=datetime_timezone.utc)) + + self.app.send_task('foo', (1, 2), eta=datetime(2024, 8, 23).isoformat()) + + self.app.amqp.send_task_message.assert_called_once_with( + ANY, + ANY, + ANY, + queue=Queue( + 'testcelery', + routing_key='testcelery', + exchange=Exchange('testcelery', type='topic') + ) + ) + + @patch('celery.app.base.detect_quorum_queues', return_value=[True, "testcelery"]) + def test_native_delayed_delivery_direct_exchange(self, detect_quorum_queues, caplog): + self.app.amqp = MagicMock(name='amqp') + self.app.amqp.router.route.return_value = { + 'queue': Queue( + 'testcelery', + routing_key='testcelery', + exchange=Exchange('testcelery', type='direct') + ) + } + + self.app.send_task('foo', (1, 2), countdown=10) + + self.app.amqp.send_task_message.assert_called_once_with( + ANY, + ANY, + ANY, + queue=Queue( + 'testcelery', + routing_key='testcelery', + exchange=Exchange('testcelery', type='direct') + ) + ) + + assert len(caplog.records) == 1 + record: LogRecord = caplog.records[0] + assert record.levelname == "WARNING" + assert record.message == ( + "Direct exchanges are not supported with native delayed delivery.\n" + "testcelery is a direct exchange but should be a topic exchange or " + "a fanout exchange in order for native delayed delivery to work properly.\n" + "If quorum queues are used, this task may block the worker process until the ETA arrives." + ) + class test_defaults: diff --git a/t/unit/app/test_backends.py b/t/unit/app/test_backends.py index 54b28456627..af6def1d150 100644 --- a/t/unit/app/test_backends.py +++ b/t/unit/app/test_backends.py @@ -115,8 +115,8 @@ def test_backend_can_not_be_module(self, app): @pytest.mark.celery( result_backend=f'{CachedBackendWithTreadTrucking.__module__}.' 
- f'{CachedBackendWithTreadTrucking.__qualname__}' - f'+memory://') + f'{CachedBackendWithTreadTrucking.__qualname__}' + f'+memory://') def test_backend_thread_safety(self): @self.app.task def dummy_add_task(x, y): diff --git a/t/unit/tasks/test_tasks.py b/t/unit/tasks/test_tasks.py index f262efc1bc6..7462313c74f 100644 --- a/t/unit/tasks/test_tasks.py +++ b/t/unit/tasks/test_tasks.py @@ -377,6 +377,7 @@ class MyCustomException(Exception): class UnpickleableException(Exception): """Exception that doesn't survive a pickling roundtrip (dump + load).""" + def __init__(self, foo, bar): super().__init__(foo) self.bar = bar diff --git a/t/unit/worker/test_consumer.py b/t/unit/worker/test_consumer.py index b43471134b2..3523e18056d 100644 --- a/t/unit/worker/test_consumer.py +++ b/t/unit/worker/test_consumer.py @@ -10,14 +10,15 @@ from celery import bootsteps from celery.contrib.testing.mocks import ContextMock -from celery.exceptions import CeleryWarning, WorkerShutdown, WorkerTerminate +from celery.exceptions import WorkerShutdown, WorkerTerminate from celery.utils.collections import LimitedSet +from celery.utils.quorum_queues import detect_quorum_queues from celery.worker.consumer.agent import Agent from celery.worker.consumer.consumer import CANCEL_TASKS_BY_DEFAULT, CLOSE, TERMINATE, Consumer from celery.worker.consumer.gossip import Gossip from celery.worker.consumer.heart import Heart from celery.worker.consumer.mingle import Mingle -from celery.worker.consumer.tasks import ETA_TASKS_NO_GLOBAL_QOS_WARNING, Tasks +from celery.worker.consumer.tasks import Tasks from celery.worker.state import active_requests @@ -652,8 +653,7 @@ def test_detect_quorum_queues_positive(self): c = self.c self.c.connection.transport.driver_type = 'amqp' c.app.amqp.queues = {"celery": Mock(queue_arguments={"x-queue-type": "quorum"})} - tasks = Tasks(c) - result, name = tasks.detect_quorum_queues(c) + result, name = detect_quorum_queues(c.app, c.connection.transport.driver_type) assert result assert name == "celery" @@ -661,16 +661,14 @@ def test_detect_quorum_queues_negative(self): c = self.c self.c.connection.transport.driver_type = 'amqp' c.app.amqp.queues = {"celery": Mock(queue_arguments=None)} - tasks = Tasks(c) - result, name = tasks.detect_quorum_queues(c) + result, name = detect_quorum_queues(c.app, c.connection.transport.driver_type) assert not result assert name == "" def test_detect_quorum_queues_not_rabbitmq(self): c = self.c self.c.connection.transport.driver_type = 'redis' - tasks = Tasks(c) - result, name = tasks.detect_quorum_queues(c) + result, name = detect_quorum_queues(c.app, c.connection.transport.driver_type) assert not result assert name == "" @@ -693,14 +691,6 @@ def test_qos_global_worker_detect_quorum_queues_true_with_quorum_queues(self): tasks = Tasks(c) assert tasks.qos_global(c) is False - def test_qos_global_eta_warning(self): - c = self.c - self.c.connection.transport.driver_type = 'amqp' - c.app.amqp.queues = {"celery": Mock(queue_arguments={"x-queue-type": "quorum"})} - tasks = Tasks(c) - with pytest.warns(CeleryWarning, match=ETA_TASKS_NO_GLOBAL_QOS_WARNING % "celery"): - tasks.qos_global(c) - def test_log_when_qos_is_false(self, caplog): c = self.c c.connection.transport.driver_type = 'amqp' diff --git a/t/unit/worker/test_native_delayed_delivery.py b/t/unit/worker/test_native_delayed_delivery.py new file mode 100644 index 00000000000..2170869d7ef --- /dev/null +++ b/t/unit/worker/test_native_delayed_delivery.py @@ -0,0 +1,74 @@ +from logging import LogRecord +from unittest.mock 
import Mock, patch
+
+from kombu import Exchange, Queue
+
+from celery.worker.consumer.delayed_delivery import DelayedDelivery
+
+
+class test_DelayedDelivery:
+    @patch('celery.worker.consumer.delayed_delivery.detect_quorum_queues', return_value=[False, ""])
+    def test_include_if_no_quorum_queues_detected(self, detect_quorum_queues):
+        consumer_mock = Mock()
+
+        delayed_delivery = DelayedDelivery(consumer_mock)
+
+        assert delayed_delivery.include_if(consumer_mock) is False
+
+    @patch('celery.worker.consumer.delayed_delivery.detect_quorum_queues', return_value=[True, ""])
+    def test_include_if_quorum_queues_detected(self, detect_quorum_queues):
+        consumer_mock = Mock()
+
+        delayed_delivery = DelayedDelivery(consumer_mock)
+
+        assert delayed_delivery.include_if(consumer_mock) is True
+
+    def test_start_native_delayed_delivery_direct_exchange(self, caplog):
+        consumer_mock = Mock()
+        consumer_mock.app.conf.broker_native_delayed_delivery_queue_type = 'classic'
+        consumer_mock.app.conf.broker_url = 'amqp://'
+        consumer_mock.app.amqp.queues = {
+            'celery': Queue('celery', exchange=Exchange('celery', type='direct'))
+        }
+
+        delayed_delivery = DelayedDelivery(consumer_mock)
+
+        delayed_delivery.start(consumer_mock)
+
+        assert len(caplog.records) == 1
+        record: LogRecord = caplog.records[0]
+        assert record.levelname == "WARNING"
+        assert record.message == (
+            "Exchange celery is a direct exchange "
+            "and native delayed delivery do not support direct exchanges.\n"
+            "ETA tasks published to this exchange "
+            "will block the worker until the ETA arrives."
+        )
+
+    def test_start_native_delayed_delivery_topic_exchange(self, caplog):
+        consumer_mock = Mock()
+        consumer_mock.app.conf.broker_native_delayed_delivery_queue_type = 'classic'
+        consumer_mock.app.conf.broker_url = 'amqp://'
+        consumer_mock.app.amqp.queues = {
+            'celery': Queue('celery', exchange=Exchange('celery', type='topic'))
+        }
+
+        delayed_delivery = DelayedDelivery(consumer_mock)
+
+        delayed_delivery.start(consumer_mock)
+
+        assert len(caplog.records) == 0
+
+    def test_start_native_delayed_delivery_fanout_exchange(self, caplog):
+        consumer_mock = Mock()
+        consumer_mock.app.conf.broker_native_delayed_delivery_queue_type = 'classic'
+        consumer_mock.app.conf.broker_url = 'amqp://'
+        consumer_mock.app.amqp.queues = {
+            'celery': Queue('celery', exchange=Exchange('celery', type='fanout'))
+        }
+
+        delayed_delivery = DelayedDelivery(consumer_mock)
+
+        delayed_delivery.start(consumer_mock)
+
+        assert len(caplog.records) == 0
From 1ca6ebbec1582fe2fe6c6c59b80cc60d227bb1cd Mon Sep 17 00:00:00 2001
From: Tomer Nosrati
Date: Mon, 18 Nov 2024 20:00:05 +0200
Subject: [PATCH 0901/1051] Prepare for (pre) release: v5.5.0rc2 (#9416)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* Bump version: 5.5.0rc1 → 5.5.0rc2

* Added Changelog for v5.5.0rc2
---
 .bumpversion.cfg               |   2 +-
 Changelog.rst                  | 167 +++++++++++++++++++++++++++++
 README.rst                     |   2 +-
 celery/__init__.py             |   2 +-
 docs/history/changelog-5.5.rst | 167 +++++++++++++++++++++++++++++
 docs/history/whatsnew-5.5.rst  |   2 +-
 docs/includes/introduction.txt |   2 +-
 7 files changed, 339 insertions(+), 5 deletions(-)

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index c037934602a..0ab9df2c382 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 5.5.0rc1
+current_version = 5.5.0rc2
 commit = True
 tag = True
 parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?P<releaselevel>[a-z\d]+)?
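
As a quick illustration of how the ``parse`` pattern above splits a version string into named parts (a sketch only, assuming standard Python ``re`` semantics; it is not part of the release tooling):

.. code-block:: python

    import re

    # Same pattern as the [bumpversion] parse option above.
    PARSE = re.compile(r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?P<releaselevel>[a-z\d]+)?")

    match = PARSE.match("5.5.0rc2")
    assert match is not None
    assert match.groupdict() == {"major": "5", "minor": "5", "patch": "0", "releaselevel": "rc2"}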
diff --git a/Changelog.rst b/Changelog.rst index e74f9b62b2f..56bb0880f31 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -8,6 +8,173 @@ This document contains change notes for bugfix & new features in the main branch & 5.5.x series, please see :ref:`whatsnew-5.5` for an overview of what's new in Celery 5.5. +.. _version-5.5.0rc2: + +5.5.0rc2 +======== + +:release-date: 2024-11-18 +:release-by: Tomer Nosrati + +Celery v5.5.0 Release Candidate 2 is now available for testing. +Please help us test this version and report any issues. + +Key Highlights +~~~~~~~~~~~~~~ + +See :ref:`whatsnew-5.5` or read the main highlights below. + +Using Kombu 5.5.0rc2 +-------------------- + +The minimum required Kombu version has been bumped to 5.5.0. +Kombu is current at 5.5.0rc2. + +Complete Quorum Queues Support +------------------------------ + +A completely new ETA mechanism was developed to allow full support with RabbitMQ Quorum Queues. + +After upgrading to this version, please share your feedback on the quorum queues support. + +Relevant Issues: +`#9207 `_, +`#6067 `_ + +- New :ref:`documentation `. +- New :setting:`broker_native_delayed_delivery_queue_type` configuration option. + +New support for Google Pub/Sub transport +---------------------------------------- + +After upgrading to this version, please share your feedback on the Google Pub/Sub transport support. + +Relevant Issues: +`#9351 `_ + +Python 3.13 Improved Support +---------------------------- + +Additional dependencies have been migrated successfully to Python 3.13, including Kombu and py-amqp. + +Previous Pre-release Highlights +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Python 3.13 Initial Support +--------------------------- + +This release introduces the initial support for Python 3.13 with Celery. + +After upgrading to this version, please share your feedback on the Python 3.13 support. + +Soft Shutdown +------------- + +The soft shutdown is a new mechanism in Celery that sits between the warm shutdown and the cold shutdown. +It sets a time limited "warm shutdown" period, during which the worker will continue to process tasks that are already running. +After the soft shutdown ends, the worker will initiate a graceful cold shutdown, stopping all tasks and exiting. + +The soft shutdown is disabled by default, and can be enabled by setting the new configuration option :setting:`worker_soft_shutdown_timeout`. +If a worker is not running any task when the soft shutdown initiates, it will skip the warm shutdown period and proceed directly to the cold shutdown +unless the new configuration option :setting:`worker_enable_soft_shutdown_on_idle` is set to True. This is useful for workers +that are idle, waiting on ETA tasks to be executed that still want to enable the soft shutdown anyways. + +The soft shutdown can replace the cold shutdown when using a broker with a visibility timeout mechanism, like :ref:`Redis ` +or :ref:`SQS `, to enable a more graceful cold shutdown procedure, allowing the worker enough time to re-queue tasks that were not +completed (e.g., ``Restoring 1 unacknowledged message(s)``) by resetting the visibility timeout of the unacknowledged messages just before +the worker exits completely. + +After upgrading to this version, please share your feedback on the new Soft Shutdown mechanism. + +Relevant Issues: +`#9213 `_, +`#9231 `_, +`#9238 `_ + +- New :ref:`documentation ` for each shutdown type. +- New :setting:`worker_soft_shutdown_timeout` configuration option. 
+- New :setting:`worker_enable_soft_shutdown_on_idle` configuration option. + +REMAP_SIGTERM +------------- + +The ``REMAP_SIGTERM`` "hidden feature" has been tested, :ref:`documented ` and is now officially supported. +This feature allows users to remap the SIGTERM signal to SIGQUIT, to initiate a soft or a cold shutdown using :sig:`TERM` +instead of :sig:`QUIT`. + +Pydantic Support +---------------- + +This release introduces support for Pydantic models in Celery tasks. +For more info, see the new pydantic example and PR `#9023 `_ by @mathiasertl. + +After upgrading to this version, please share your feedback on the new Pydantic support. + +Redis Broker Stability Improvements +----------------------------------- +The root cause of the Redis broker instability issue has been `identified and resolved `_ +in the v5.4.0 release of Kombu, which should resolve the disconnections bug and offer additional improvements. + +After upgrading to this version, please share your feedback on the Redis broker stability. + +Relevant Issues: +`#7276 `_, +`#8091 `_, +`#8030 `_, +`#8384 `_ + +Quorum Queues Initial Support +----------------------------- +This release introduces the initial support for Quorum Queues with Celery. + +See new configuration options for more details: + +- :setting:`task_default_queue_type` +- :setting:`worker_detect_quorum_queues` + +After upgrading to this version, please share your feedback on the Quorum Queues support. + +Relevant Issues: +`#6067 `_, +`#9121 `_ + +What's Changed +~~~~~~~~~~~~~~ + +- Fix: Treat dbm.error as a corrupted schedule file (#9331) +- Pin pre-commit to latest version 4.0.1 (#9343) +- Added Python 3.13 to Dockerfiles (#9350) +- Skip test_pool_restart_import_modules on PyPy due to test issue (#9352) +- Update elastic-transport requirement from <=8.15.0 to <=8.15.1 (#9347) +- added dragonfly logo (#9353) +- Update README.rst (#9354) +- Update README.rst (#9355) +- Update mypy to 1.12.0 (#9356) +- Bump Kombu to v5.5.0rc1 (#9357) +- Fix `celery --loader` option parsing (#9361) +- Add support for Google Pub/Sub transport (#9351) +- Add native incr support for GCSBackend (#9302) +- fix(perform_pending_operations): prevent task duplication on shutdown… (#9348) +- Update grpcio to 1.67.0 (#9365) +- Update google-cloud-firestore to 2.19.0 (#9364) +- Annotate celery/utils/timer2.py (#9362) +- Update cryptography to 43.0.3 (#9366) +- Update mypy to 1.12.1 (#9368) +- Bump mypy from 1.12.1 to 1.13.0 (#9373) +- Pass timeout and confirm_timeout to producer.publish() (#9374) +- Bump Kombu to v5.5.0rc2 (#9382) +- Bump pytest-cov from 5.0.0 to 6.0.0 (#9388) +- default strict to False for pydantic tasks (#9393) +- Only log that global QoS is disabled if using amqp (#9395) +- chore: update sponsorship logo (#9398) +- Allow custom hostname for celery_worker in celery.contrib.pytest / celery.contrib.testing.worker (#9405) +- Removed docker-docs from CI (optional job, malfunctioning) (#9406) +- Added a utility to format changelogs from the auto-generated GitHub release notes (#9408) +- Bump codecov/codecov-action from 4 to 5 (#9412) +- Update elasticsearch requirement from <=8.15.1 to <=8.16.0 (#9410) +- Native Delayed Delivery in RabbitMQ (#9207) +- Prepare for (pre) release: v5.5.0rc2 (#9416) + .. 
_version-5.5.0rc1: 5.5.0rc1 diff --git a/README.rst b/README.rst index aa24b66953b..bab1e57cbe8 100644 --- a/README.rst +++ b/README.rst @@ -2,7 +2,7 @@ |build-status| |coverage| |license| |wheel| |semgrep| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge| -:Version: 5.5.0rc1 (immunity) +:Version: 5.5.0rc2 (immunity) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ diff --git a/celery/__init__.py b/celery/__init__.py index 1cfecdd6eab..9794597fd52 100644 --- a/celery/__init__.py +++ b/celery/__init__.py @@ -17,7 +17,7 @@ SERIES = 'immunity' -__version__ = '5.5.0rc1' +__version__ = '5.5.0rc2' __author__ = 'Ask Solem' __contact__ = 'auvipy@gmail.com' __homepage__ = 'https://docs.celeryq.dev/' diff --git a/docs/history/changelog-5.5.rst b/docs/history/changelog-5.5.rst index 1fa6db1aadd..819d009ac51 100644 --- a/docs/history/changelog-5.5.rst +++ b/docs/history/changelog-5.5.rst @@ -8,6 +8,173 @@ This document contains change notes for bugfix & new features in the main branch & 5.5.x series, please see :ref:`whatsnew-5.5` for an overview of what's new in Celery 5.5. +.. _version-5.5.0rc2: + +5.5.0rc2 +======== + +:release-date: 2024-11-18 +:release-by: Tomer Nosrati + +Celery v5.5.0 Release Candidate 2 is now available for testing. +Please help us test this version and report any issues. + +Key Highlights +~~~~~~~~~~~~~~ + +See :ref:`whatsnew-5.5` or read the main highlights below. + +Using Kombu 5.5.0rc2 +-------------------- + +The minimum required Kombu version has been bumped to 5.5.0. +Kombu is current at 5.5.0rc2. + +Complete Quorum Queues Support +------------------------------ + +A completely new ETA mechanism was developed to allow full support with RabbitMQ Quorum Queues. + +After upgrading to this version, please share your feedback on the quorum queues support. + +Relevant Issues: +`#9207 `_, +`#6067 `_ + +- New :ref:`documentation `. +- New :setting:`broker_native_delayed_delivery_queue_type` configuration option. + +New support for Google Pub/Sub transport +---------------------------------------- + +After upgrading to this version, please share your feedback on the Google Pub/Sub transport support. + +Relevant Issues: +`#9351 `_ + +Python 3.13 Improved Support +---------------------------- + +Additional dependencies have been migrated successfully to Python 3.13, including Kombu and py-amqp. + +Previous Pre-release Highlights +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Python 3.13 Initial Support +--------------------------- + +This release introduces the initial support for Python 3.13 with Celery. + +After upgrading to this version, please share your feedback on the Python 3.13 support. + +Soft Shutdown +------------- + +The soft shutdown is a new mechanism in Celery that sits between the warm shutdown and the cold shutdown. +It sets a time limited "warm shutdown" period, during which the worker will continue to process tasks that are already running. +After the soft shutdown ends, the worker will initiate a graceful cold shutdown, stopping all tasks and exiting. + +The soft shutdown is disabled by default, and can be enabled by setting the new configuration option :setting:`worker_soft_shutdown_timeout`. +If a worker is not running any task when the soft shutdown initiates, it will skip the warm shutdown period and proceed directly to the cold shutdown +unless the new configuration option :setting:`worker_enable_soft_shutdown_on_idle` is set to True. 
This is useful for workers +that are idle, waiting on ETA tasks to be executed that still want to enable the soft shutdown anyways. + +The soft shutdown can replace the cold shutdown when using a broker with a visibility timeout mechanism, like :ref:`Redis ` +or :ref:`SQS `, to enable a more graceful cold shutdown procedure, allowing the worker enough time to re-queue tasks that were not +completed (e.g., ``Restoring 1 unacknowledged message(s)``) by resetting the visibility timeout of the unacknowledged messages just before +the worker exits completely. + +After upgrading to this version, please share your feedback on the new Soft Shutdown mechanism. + +Relevant Issues: +`#9213 `_, +`#9231 `_, +`#9238 `_ + +- New :ref:`documentation ` for each shutdown type. +- New :setting:`worker_soft_shutdown_timeout` configuration option. +- New :setting:`worker_enable_soft_shutdown_on_idle` configuration option. + +REMAP_SIGTERM +------------- + +The ``REMAP_SIGTERM`` "hidden feature" has been tested, :ref:`documented ` and is now officially supported. +This feature allows users to remap the SIGTERM signal to SIGQUIT, to initiate a soft or a cold shutdown using :sig:`TERM` +instead of :sig:`QUIT`. + +Pydantic Support +---------------- + +This release introduces support for Pydantic models in Celery tasks. +For more info, see the new pydantic example and PR `#9023 `_ by @mathiasertl. + +After upgrading to this version, please share your feedback on the new Pydantic support. + +Redis Broker Stability Improvements +----------------------------------- +The root cause of the Redis broker instability issue has been `identified and resolved `_ +in the v5.4.0 release of Kombu, which should resolve the disconnections bug and offer additional improvements. + +After upgrading to this version, please share your feedback on the Redis broker stability. + +Relevant Issues: +`#7276 `_, +`#8091 `_, +`#8030 `_, +`#8384 `_ + +Quorum Queues Initial Support +----------------------------- +This release introduces the initial support for Quorum Queues with Celery. + +See new configuration options for more details: + +- :setting:`task_default_queue_type` +- :setting:`worker_detect_quorum_queues` + +After upgrading to this version, please share your feedback on the Quorum Queues support. 
+ +Relevant Issues: +`#6067 `_, +`#9121 `_ + +What's Changed +~~~~~~~~~~~~~~ + +- Fix: Treat dbm.error as a corrupted schedule file (#9331) +- Pin pre-commit to latest version 4.0.1 (#9343) +- Added Python 3.13 to Dockerfiles (#9350) +- Skip test_pool_restart_import_modules on PyPy due to test issue (#9352) +- Update elastic-transport requirement from <=8.15.0 to <=8.15.1 (#9347) +- added dragonfly logo (#9353) +- Update README.rst (#9354) +- Update README.rst (#9355) +- Update mypy to 1.12.0 (#9356) +- Bump Kombu to v5.5.0rc1 (#9357) +- Fix `celery --loader` option parsing (#9361) +- Add support for Google Pub/Sub transport (#9351) +- Add native incr support for GCSBackend (#9302) +- fix(perform_pending_operations): prevent task duplication on shutdown… (#9348) +- Update grpcio to 1.67.0 (#9365) +- Update google-cloud-firestore to 2.19.0 (#9364) +- Annotate celery/utils/timer2.py (#9362) +- Update cryptography to 43.0.3 (#9366) +- Update mypy to 1.12.1 (#9368) +- Bump mypy from 1.12.1 to 1.13.0 (#9373) +- Pass timeout and confirm_timeout to producer.publish() (#9374) +- Bump Kombu to v5.5.0rc2 (#9382) +- Bump pytest-cov from 5.0.0 to 6.0.0 (#9388) +- default strict to False for pydantic tasks (#9393) +- Only log that global QoS is disabled if using amqp (#9395) +- chore: update sponsorship logo (#9398) +- Allow custom hostname for celery_worker in celery.contrib.pytest / celery.contrib.testing.worker (#9405) +- Removed docker-docs from CI (optional job, malfunctioning) (#9406) +- Added a utility to format changelogs from the auto-generated GitHub release notes (#9408) +- Bump codecov/codecov-action from 4 to 5 (#9412) +- Update elasticsearch requirement from <=8.15.1 to <=8.16.0 (#9410) +- Native Delayed Delivery in RabbitMQ (#9207) +- Prepare for (pre) release: v5.5.0rc2 (#9416) + .. _version-5.5.0rc1: 5.5.0rc1 diff --git a/docs/history/whatsnew-5.5.rst b/docs/history/whatsnew-5.5.rst index b9ea8689619..6c346bed90a 100644 --- a/docs/history/whatsnew-5.5.rst +++ b/docs/history/whatsnew-5.5.rst @@ -168,7 +168,7 @@ Minimum Dependencies Kombu ~~~~~ -Starting from Celery v5.5, the minimum required version is Kombu 5.4. +Starting from Celery v5.5, the minimum required version is Kombu 5.5. Redis ~~~~~ diff --git a/docs/includes/introduction.txt b/docs/includes/introduction.txt index a51a36756de..f2cca8f3b52 100644 --- a/docs/includes/introduction.txt +++ b/docs/includes/introduction.txt @@ -1,4 +1,4 @@ -:Version: 5.5.0rc1 (immunity) +:Version: 5.5.0rc2 (immunity) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ From a4ddb43aa219ec49462c3d69b5c08894ff82be3c Mon Sep 17 00:00:00 2001 From: Omer Katz Date: Tue, 19 Nov 2024 16:38:20 +0200 Subject: [PATCH 0902/1051] Document usage of broker_native_delayed_delivery_queue_type. (#9419) --- docs/getting-started/backends-and-brokers/rabbitmq.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docs/getting-started/backends-and-brokers/rabbitmq.rst b/docs/getting-started/backends-and-brokers/rabbitmq.rst index 2c6d14ab640..5a324ecdc35 100644 --- a/docs/getting-started/backends-and-brokers/rabbitmq.rst +++ b/docs/getting-started/backends-and-brokers/rabbitmq.rst @@ -236,3 +236,7 @@ The design is borrowed from NServiceBus. If you are interested in the implementa .. _documentation: https://docs.particular.net/transports/rabbitmq/delayed-delivery Native Delayed Delivery is automatically enabled when quorum queues are detected. 
+
+By default, the Native Delayed Delivery queues are quorum queues.
+If you'd like to change them to classic queues, you can set the :setting:`broker_native_delayed_delivery_queue_type`
+to ``classic``.
From d90a58c0a167d3639e626ac70627efcf28c25e47 Mon Sep 17 00:00:00 2001
From: Omer Katz
Date: Tue, 19 Nov 2024 16:39:53 +0200
Subject: [PATCH 0903/1051] Adjust section in what's new document regarding
 quorum queues support. (#9420)

---
 docs/history/whatsnew-5.5.rst | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/docs/history/whatsnew-5.5.rst b/docs/history/whatsnew-5.5.rst
index 6c346bed90a..d2f5f9a7958 100644
--- a/docs/history/whatsnew-5.5.rst
+++ b/docs/history/whatsnew-5.5.rst
@@ -285,15 +285,17 @@ Quorum Queues Initial Support
 -----------------------------
 This release introduces the initial support for Quorum Queues with Celery.
+See the documentation for :ref:`using-quorum-queues` for more details.

-See new configuration options for more details:
+In addition, you can read about the new configuration options relevant for this feature:

 - :setting:`task_default_queue_type`
 - :setting:`worker_detect_quorum_queues`
+- :setting:`broker_native_delayed_delivery_queue_type`

 REMAP_SIGTERM
 -------------

 The REMAP_SIGTERM "hidden feature" has been tested, :ref:`documented ` and is now officially supported.
 This feature allows users to remap the SIGTERM signal to SIGQUIT, to initiate a soft or a cold shutdown using TERM
-instead of QUIT.
\ No newline at end of file
+instead of QUIT.
From f1ddd58647ee24bee4f74c9c4e45812728cfd514 Mon Sep 17 00:00:00 2001
From: "pyup.io bot"
Date: Wed, 20 Nov 2024 05:58:57 -0800
Subject: [PATCH 0904/1051] Update pytest-rerunfailures to 15.0 (#9422)

* Update pytest-rerunfailures from 14.0 to 15.0

* Update requirements/test.txt

---------

Co-authored-by: Tomer Nosrati
---
 requirements/test.txt | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/requirements/test.txt b/requirements/test.txt
index 2eb5e7affc1..8b01ef49fa9 100644
--- a/requirements/test.txt
+++ b/requirements/test.txt
@@ -1,6 +1,7 @@
 pytest==8.3.3
 pytest-celery[all]>=1.1.3
-pytest-rerunfailures==14.0
+pytest-rerunfailures>=14.0,<15.0; python_version >= "3.8" and python_version < "3.9"
+pytest-rerunfailures>=15.0; python_version >= "3.9" and python_version < "4.0"
 pytest-subtests==0.13.1
 pytest-timeout==2.3.1
 pytest-click==1.1.0
From 1b35d1d5966614ce36af75808ee21b0d2db6745d Mon Sep 17 00:00:00 2001
From: Omer Katz
Date: Wed, 20 Nov 2024 16:15:03 +0200
Subject: [PATCH 0905/1051] Document group unrolling. (#9421)

---
 docs/userguide/canvas.rst | 42 +++++++++++++++++++++++++++++++++++++++
 1 file changed, 42 insertions(+)

diff --git a/docs/userguide/canvas.rst b/docs/userguide/canvas.rst
index f9c8c1d323e..8b74e38b955 100644
--- a/docs/userguide/canvas.rst
+++ b/docs/userguide/canvas.rst
@@ -817,6 +817,48 @@ It supports the following operations:
     Gather the results of all subtasks
     and return them in the same order as they were called (as a list).

+.. _group-unrolling:
+
+Group Unrolling
+~~~~~~~~~~~~~~~
+
+A group with a single signature will be unrolled into a single signature when chained.
+This means that the following group may pass either a list of results or a single result to the chain,
+depending on the number of items in the group.
+
+..
code-block:: pycon + + >>> from celery import chain, group + >>> from tasks import add + >>> chain(add.s(2, 2), group(add.s(1)), add.s(1)) + add(2, 2) | add(1) | add(1) + >>> chain(add.s(2, 2), group(add.s(1), add.s(2)), add.s(1)) + add(2, 2) | %add((add(1), add(2)), 1) + +This means that you should be careful and make sure the ``add`` task can accept either a list or a single item as input +if you plan to use it as part of a larger canvas. + +.. warning:: + + In Celery 4.x the following group below would not unroll into a chain due to a bug but instead the canvas would be + upgraded into a chord. + + .. code-block:: pycon + + >>> from celery import chain, group + >>> from tasks import add + >>> chain(group(add.s(1, 1)), add.s(2)) + %add([add(1, 1)], 2) + + In Celery 5.x this bug was fixed and the group is correctly unrolled into a single signature. + + .. code-block:: pycon + + >>> from celery import chain, group + >>> from tasks import add + >>> chain(group(add.s(1, 1)), add.s(2)) + add(1, 1) | add(2) + .. _canvas-chord: Chords From 3630e467361009a8b3f3050807ed16503d5c4441 Mon Sep 17 00:00:00 2001 From: Sharuzzaman Ahmat Raslan Date: Mon, 25 Nov 2024 21:13:40 +0800 Subject: [PATCH 0906/1051] fix small typo acces -> access (#9434) fix small typo for word access, was spelled as acces --- docs/userguide/configuration.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst index ef0dc811701..01b276458ec 100644 --- a/docs/userguide/configuration.rst +++ b/docs/userguide/configuration.rst @@ -1670,7 +1670,7 @@ Default: None. The s3 access key id. For example:: - s3_access_key_id = 'acces_key_id' + s3_access_key_id = 'access_key_id' .. setting:: s3_secret_access_key @@ -1681,7 +1681,7 @@ Default: None. The s3 secret access key. For example:: - s3_secret_access_key = 'acces_secret_access_key' + s3_secret_access_key = 'access_secret_access_key' .. 
setting:: s3_bucket From 5b5e9f31bf608b1c7eff9c22855a408d8ee42500 Mon Sep 17 00:00:00 2001 From: "pyup.io bot" Date: Wed, 27 Nov 2024 14:09:13 -0800 Subject: [PATCH 0907/1051] Update cryptography from 43.0.3 to 44.0.0 (#9437) --- requirements/extras/auth.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/auth.txt b/requirements/extras/auth.txt index 75287dd9fb0..afd08f7b18e 100644 --- a/requirements/extras/auth.txt +++ b/requirements/extras/auth.txt @@ -1 +1 @@ -cryptography==43.0.3 +cryptography==44.0.0 From d02bdda42f92b791ec1fd1a69be3bcdf0615cbde Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 28 Nov 2024 16:43:24 +0200 Subject: [PATCH 0908/1051] Added pypy to Dockerfile (#9438) --- docker/Dockerfile | 21 ++++++++++++++++----- 1 file changed, 16 insertions(+), 5 deletions(-) diff --git a/docker/Dockerfile b/docker/Dockerfile index e40faa71f56..3cc2a3aff38 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -71,11 +71,11 @@ RUN pyenv install 3.11 RUN pyenv install 3.10 RUN pyenv install 3.9 RUN pyenv install 3.8 - +RUN pyenv install pypy3.10 # Set global Python versions -RUN pyenv global 3.12 3.11 3.10 3.9 3.8 +RUN pyenv global 3.12 3.11 3.10 3.9 3.8 pypy3.10 # Install celery WORKDIR $HOME @@ -84,14 +84,15 @@ COPY --chown=1000:1000 docker/entrypoint /entrypoint RUN chmod gu+x /entrypoint # Define the local pyenvs -RUN pyenv local 3.13 3.12 3.11 3.10 3.9 3.8 +RUN pyenv local 3.13 3.12 3.11 3.10 3.9 3.8 pypy3.10 RUN pyenv exec python3.13 -m pip install --upgrade pip setuptools wheel && \ pyenv exec python3.12 -m pip install --upgrade pip setuptools wheel && \ pyenv exec python3.11 -m pip install --upgrade pip setuptools wheel && \ pyenv exec python3.10 -m pip install --upgrade pip setuptools wheel && \ pyenv exec python3.9 -m pip install --upgrade pip setuptools wheel && \ - pyenv exec python3.8 -m pip install --upgrade pip setuptools wheel + pyenv exec python3.8 -m pip install --upgrade pip setuptools wheel && \ + pyenv exec pypy3.10 -m pip install --upgrade pip setuptools wheel COPY --chown=1000:1000 . 
$HOME/celery @@ -100,7 +101,8 @@ RUN pyenv exec python3.13 -m pip install -e $HOME/celery && \ pyenv exec python3.11 -m pip install -e $HOME/celery && \ pyenv exec python3.10 -m pip install -e $HOME/celery && \ pyenv exec python3.9 -m pip install -e $HOME/celery && \ - pyenv exec python3.8 -m pip install -e $HOME/celery + pyenv exec python3.8 -m pip install -e $HOME/celery && \ + pyenv exec pypy3.10 -m pip install -e $HOME/celery # Setup one celery environment for basic development use RUN pyenv exec python3.13 -m pip install -r requirements/default.txt \ @@ -156,6 +158,15 @@ RUN pyenv exec python3.13 -m pip install -r requirements/default.txt \ -r requirements/test-ci-default.txt \ -r requirements/test-integration.txt \ -r requirements/test-pypy3.txt \ + -r requirements/test.txt && \ + pyenv exec pypy3.10 -m pip install -r requirements/default.txt \ + -r requirements/dev.txt \ + -r requirements/docs.txt \ + -r requirements/pkgutils.txt \ + -r requirements/test-ci-base.txt \ + -r requirements/test-ci-default.txt \ + -r requirements/test-integration.txt \ + -r requirements/test-pypy3.txt \ -r requirements/test.txt WORKDIR $HOME/celery From 66e48221542ffe6be89c6dc2b74c75d741ed3122 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 28 Nov 2024 16:55:38 +0200 Subject: [PATCH 0909/1051] Skipped flaky tests on pypy (all pass after ~10 reruns) (#9439) --- t/unit/concurrency/test_prefork.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/t/unit/concurrency/test_prefork.py b/t/unit/concurrency/test_prefork.py index eda7cee519f..bea0cd9481d 100644 --- a/t/unit/concurrency/test_prefork.py +++ b/t/unit/concurrency/test_prefork.py @@ -368,6 +368,7 @@ def _get_hub(self): hub.close = Mock(name='hub.close()') return hub + @t.skip.if_pypy def test_schedule_writes_hub_remove_writer_ready_fd_not_in_all_inqueues(self): pool = asynpool.AsynPool(threads=False) hub = self._get_hub() @@ -386,6 +387,7 @@ def test_schedule_writes_hub_remove_writer_ready_fd_not_in_all_inqueues(self): assert 6 in hub.readers assert 6 not in hub.writers + @t.skip.if_pypy def test_schedule_writes_hub_remove_writers_from_active_writers_when_get_index_error(self): pool = asynpool.AsynPool(threads=False) hub = self._get_hub() @@ -420,6 +422,7 @@ def test_schedule_writes_hub_remove_writers_from_active_writers_when_get_index_e assert 6 in hub.writers + @t.skip.if_pypy def test_schedule_writes_hub_remove_fd_only_from_writers_when_write_job_is_done(self): pool = asynpool.AsynPool(threads=False) hub = self._get_hub() @@ -450,6 +453,7 @@ def test_schedule_writes_hub_remove_fd_only_from_writers_when_write_job_is_done( assert 2 not in hub.writers assert 2 in hub.readers + @t.skip.if_pypy def test_register_with_event_loop__no_on_tick_dupes(self): """Ensure AsynPool's register_with_event_loop only registers on_poll_start in the event loop the first time it's called. 
This
@@ -461,6 +465,7 @@ def test_register_with_event_loop__no_on_tick_dupes(self):
         pool.register_with_event_loop(hub)
         hub.on_tick.add.assert_called_once()

+    @t.skip.if_pypy
     @patch('billiard.pool.Pool._create_worker_process')
     def test_before_create_process_signal(self, create_process):
         from celery import signals

From 3f2aed4b565d0d9dc4c8dc7fec6e8516a29017ba Mon Sep 17 00:00:00 2001
From: PieterBlomme
Date: Thu, 28 Nov 2024 16:48:15 +0100
Subject: [PATCH 0910/1051] Allowing managed credentials for azureblockblob (#9430)

* Allowing managed credentials for azureblockblob

* Update azureblockblob.py

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* Only change flow for Azure Identity

* Adding testcases

* flake8 fixes

* Code assistant was a bit overzealous

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Pieter Blomme

---
 celery/backends/azureblockblob.py      | 33 +++++++++++++++++++++----
 requirements/extras/azureblockblob.txt |  1 +
 t/unit/backends/test_azureblockblob.py | 34 ++++++++++++++++++++++++++
 3 files changed, 63 insertions(+), 5 deletions(-)

diff --git a/celery/backends/azureblockblob.py b/celery/backends/azureblockblob.py
index 862777b5fdb..3648cbe4172 100644
--- a/celery/backends/azureblockblob.py
+++ b/celery/backends/azureblockblob.py
@@ -1,4 +1,5 @@
 """The Azure Storage Block Blob backend for Celery."""
+from kombu.transport.azurestoragequeues import Transport as AzureStorageQueuesTransport
 from kombu.utils import cached_property
 from kombu.utils.encoding import bytes_to_str

@@ -28,6 +29,13 @@ def __init__(self,
                  container_name=None,
                  *args,
                  **kwargs):
+        """
+        Supported URL formats:
+
+        azureblockblob://CONNECTION_STRING
+        azureblockblob://DefaultAzureCredential@STORAGE_ACCOUNT_URL
+        azureblockblob://ManagedIdentityCredential@STORAGE_ACCOUNT_URL
+        """
         super().__init__(*args, **kwargs)

         if azurestorage is None or azurestorage.__version__ < '12':

@@ -65,11 +73,26 @@ def _blob_service_client(self):
         the container is created if it doesn't yet exist.
""" - client = BlobServiceClient.from_connection_string( - self._connection_string, - connection_timeout=self._connection_timeout, - read_timeout=self._read_timeout - ) + if ( + "DefaultAzureCredential" in self._connection_string or + "ManagedIdentityCredential" in self._connection_string + ): + # Leveraging the work that Kombu already did for us + credential_, url = AzureStorageQueuesTransport.parse_uri( + self._connection_string + ) + client = BlobServiceClient( + account_url=url, + credential=credential_, + connection_timeout=self._connection_timeout, + read_timeout=self._read_timeout, + ) + else: + client = BlobServiceClient.from_connection_string( + self._connection_string, + connection_timeout=self._connection_timeout, + read_timeout=self._read_timeout, + ) try: client.create_container(name=self._container_name) diff --git a/requirements/extras/azureblockblob.txt b/requirements/extras/azureblockblob.txt index f8329f38c8d..3ecebd5beb8 100644 --- a/requirements/extras/azureblockblob.txt +++ b/requirements/extras/azureblockblob.txt @@ -1 +1,2 @@ azure-storage-blob>=12.15.0 +azure-identity>=1.19.0 \ No newline at end of file diff --git a/t/unit/backends/test_azureblockblob.py b/t/unit/backends/test_azureblockblob.py index 36ca91d82cb..434040dcd07 100644 --- a/t/unit/backends/test_azureblockblob.py +++ b/t/unit/backends/test_azureblockblob.py @@ -61,6 +61,40 @@ def test_create_client(self, mock_blob_service_factory): assert backend._blob_service_client is not None assert mock_blob_service_client_instance.create_container.call_count == 1 + @patch(MODULE_TO_MOCK + ".AzureStorageQueuesTransport") + @patch(MODULE_TO_MOCK + ".BlobServiceClient") + def test_create_client__default_azure_credentials(self, mock_blob_service_client, mock_kombu_transport): + credential_mock = Mock() + mock_blob_service_client.return_value = Mock() + mock_kombu_transport.parse_uri.return_value = (credential_mock, "dummy_account_url") + url = "azureblockblob://DefaultAzureCredential@dummy_account_url" + backend = AzureBlockBlobBackend(app=self.app, url=url) + assert backend._blob_service_client is not None + mock_kombu_transport.parse_uri.assert_called_once_with(url.replace("azureblockblob://", "")) + mock_blob_service_client.assert_called_once_with( + account_url="dummy_account_url", + credential=credential_mock, + connection_timeout=backend._connection_timeout, + read_timeout=backend._read_timeout, + ) + + @patch(MODULE_TO_MOCK + ".AzureStorageQueuesTransport") + @patch(MODULE_TO_MOCK + ".BlobServiceClient") + def test_create_client__managed_identity_azure_credentials(self, mock_blob_service_client, mock_kombu_transport): + credential_mock = Mock() + mock_blob_service_client.return_value = Mock() + mock_kombu_transport.parse_uri.return_value = (credential_mock, "dummy_account_url") + url = "azureblockblob://ManagedIdentityCredential@dummy_account_url" + backend = AzureBlockBlobBackend(app=self.app, url=url) + assert backend._blob_service_client is not None + mock_kombu_transport.parse_uri.assert_called_once_with(url.replace("azureblockblob://", "")) + mock_blob_service_client.assert_called_once_with( + account_url="dummy_account_url", + credential=credential_mock, + connection_timeout=backend._connection_timeout, + read_timeout=backend._read_timeout, + ) + @patch(MODULE_TO_MOCK + ".BlobServiceClient") def test_configure_client(self, mock_blob_service_factory): From eb559a6a10ee591aa034337bfa3a31ccd0182f62 Mon Sep 17 00:00:00 2001 From: Helio Machado <0x2b3bfa0+git@googlemail.com> Date: Sun, 1 Dec 2024 12:59:55 +0100 
Subject: [PATCH 0911/1051] Allow passing Celery objects to the Click entry point (#9426) * Allow passing Celery objects to the Click entry point * Enhance code comment --- celery/bin/celery.py | 34 ++++++++++++++++++++-------------- 1 file changed, 20 insertions(+), 14 deletions(-) diff --git a/celery/bin/celery.py b/celery/bin/celery.py index da1fff5be24..4ddf9c7fc7a 100644 --- a/celery/bin/celery.py +++ b/celery/bin/celery.py @@ -62,6 +62,11 @@ '--app', envvar='APP', cls=CeleryOption, + # May take either: a str when invoked from command line (Click), + # or a Celery object when invoked from inside Celery; hence the + # need to prevent Click from "processing" the Celery object and + # converting it into its str representation. + type=click.UNPROCESSED, help_group="Global Options") @click.option('-b', '--broker', @@ -131,25 +136,26 @@ def celery(ctx, app, broker, result_backend, loader, config, workdir, if skip_checks: os.environ['CELERY_SKIP_CHECKS'] = 'true' - try: - app_object = find_app(app) - except ModuleNotFoundError as e: - if e.name != app: + if isinstance(app, str): + try: + app = find_app(app) + except ModuleNotFoundError as e: + if e.name != app: + exc = traceback.format_exc() + ctx.fail( + UNABLE_TO_LOAD_APP_ERROR_OCCURRED.format(app, exc) + ) + ctx.fail(UNABLE_TO_LOAD_APP_MODULE_NOT_FOUND.format(e.name)) + except AttributeError as e: + attribute_name = e.args[0].capitalize() + ctx.fail(UNABLE_TO_LOAD_APP_APP_MISSING.format(attribute_name)) + except Exception: exc = traceback.format_exc() ctx.fail( UNABLE_TO_LOAD_APP_ERROR_OCCURRED.format(app, exc) ) - ctx.fail(UNABLE_TO_LOAD_APP_MODULE_NOT_FOUND.format(e.name)) - except AttributeError as e: - attribute_name = e.args[0].capitalize() - ctx.fail(UNABLE_TO_LOAD_APP_APP_MISSING.format(attribute_name)) - except Exception: - exc = traceback.format_exc() - ctx.fail( - UNABLE_TO_LOAD_APP_ERROR_OCCURRED.format(app, exc) - ) - ctx.obj = CLIContext(app=app_object, no_color=no_color, workdir=workdir, + ctx.obj = CLIContext(app=app, no_color=no_color, workdir=workdir, quiet=quiet) # User options From 4d129fde1b037f4d5ab6b546f055d02b5e2a3c3c Mon Sep 17 00:00:00 2001 From: Wout De Nolf Date: Sun, 1 Dec 2024 16:54:45 +0100 Subject: [PATCH 0912/1051] support Request termination for gevent (#9440) * support Request termination for gevent * a greenlet should not be killed twice --- celery/concurrency/gevent.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/celery/concurrency/gevent.py b/celery/concurrency/gevent.py index 4855ae6fce2..fd58e91be8f 100644 --- a/celery/concurrency/gevent.py +++ b/celery/concurrency/gevent.py @@ -1,5 +1,6 @@ """Gevent execution pool.""" import functools +import types from time import monotonic from kombu.asynchronous import timer as _timer @@ -121,6 +122,7 @@ def on_apply(self, target, args=None, kwargs=None, callback=None, target, args, kwargs, callback, accept_callback, self.getpid, timeout=timeout, timeout_callback=timeout_callback) self._add_to_pool_map(id(greenlet), greenlet) + greenlet.terminate = types.MethodType(_terminate, greenlet) return greenlet def grow(self, n=1): @@ -162,3 +164,8 @@ def _add_to_pool_map(self, pid, greenlet): @staticmethod def _cleanup_after_job_finish(greenlet, pool_map, pid): del pool_map[pid] + + +def _terminate(self, signal): + # Done in `TaskPool.terminate_job` + pass From 01863590513efb32e85e90f22966f449dda68381 Mon Sep 17 00:00:00 2001 From: Wei Wei <49308161+Androidown@users.noreply.github.com> Date: Mon, 2 Dec 2024 00:52:12 +0800 Subject: [PATCH 0913/1051] Prevent 
event_mask from being overwritten. (#9432) * Prevent event_mask being overwritten * fix typo * add test case --------- Co-authored-by: weiwei Co-authored-by: Tomer Nosrati --- celery/concurrency/asynpool.py | 19 ++++++++++++++----- t/unit/concurrency/test_prefork.py | 10 ++++++++++ 2 files changed, 24 insertions(+), 5 deletions(-) diff --git a/celery/concurrency/asynpool.py b/celery/concurrency/asynpool.py index 7f51307c6c4..dd2f068a215 100644 --- a/celery/concurrency/asynpool.py +++ b/celery/concurrency/asynpool.py @@ -103,26 +103,35 @@ def _get_job_writer(job): return writer() # is a weakref +def _ensure_integral_fd(fd): + return fd if isinstance(fd, Integral) else fd.fileno() + + if hasattr(select, 'poll'): def _select_imp(readers=None, writers=None, err=None, timeout=0, poll=select.poll, POLLIN=select.POLLIN, POLLOUT=select.POLLOUT, POLLERR=select.POLLERR): poller = poll() register = poller.register + fd_to_mask = {} if readers: - [register(fd, POLLIN) for fd in readers] + for fd in map(_ensure_integral_fd, readers): + fd_to_mask[fd] = fd_to_mask.get(fd, 0) | POLLIN if writers: - [register(fd, POLLOUT) for fd in writers] + for fd in map(_ensure_integral_fd, writers): + fd_to_mask[fd] = fd_to_mask.get(fd, 0) | POLLOUT if err: - [register(fd, POLLERR) for fd in err] + for fd in map(_ensure_integral_fd, err): + fd_to_mask[fd] = fd_to_mask.get(fd, 0) | POLLERR + + for fd, event_mask in fd_to_mask.items(): + register(fd, event_mask) R, W = set(), set() timeout = 0 if timeout and timeout < 0 else round(timeout * 1e3) events = poller.poll(timeout) for fd, event in events: - if not isinstance(fd, Integral): - fd = fd.fileno() if event & POLLIN: R.add(fd) if event & POLLOUT: diff --git a/t/unit/concurrency/test_prefork.py b/t/unit/concurrency/test_prefork.py index bea0cd9481d..ea42c09bad9 100644 --- a/t/unit/concurrency/test_prefork.py +++ b/t/unit/concurrency/test_prefork.py @@ -1,6 +1,7 @@ import errno import os import socket +import tempfile from itertools import cycle from unittest.mock import Mock, patch @@ -293,6 +294,15 @@ def se2(*args): with pytest.raises(socket.error): asynpool._select({3}, poll=poll) + def test_select_unpatched(self): + with tempfile.TemporaryFile('w') as f: + _, writeable, _ = asynpool._select(writers={f, }, err={f, }) + assert f.fileno() in writeable + + with tempfile.TemporaryFile('r') as f: + readable, _, _ = asynpool._select(readers={f, }, err={f, }) + assert f.fileno() in readable + def test_promise(self): fun = Mock() x = asynpool.promise(fun, (1,), {'foo': 1}) From e3eaa675ee1e48d03a018f0abe763cc1dfb380a5 Mon Sep 17 00:00:00 2001 From: "pyup.io bot" Date: Sun, 1 Dec 2024 12:17:40 -0800 Subject: [PATCH 0914/1051] Update pytest from 8.3.3 to 8.3.4 (#9444) --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index 8b01ef49fa9..f61d5f7e661 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,4 +1,4 @@ -pytest==8.3.3 +pytest==8.3.4 pytest-celery[all]>=1.1.3 pytest-rerunfailures>=14.0,<15.0; python_version >= "3.8" and python_version < "3.9" pytest-rerunfailures>=15.0; python_version >= "3.9" and python_version < "4.0" From a8e10bc73b09028b563321de412337da1a6f3c1b Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 3 Dec 2024 13:58:31 +0200 Subject: [PATCH 0915/1051] Prepare for (pre) release: v5.5.0rc3 (#9450) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Bump version: 5.5.0rc2 → 5.5.0rc3 * Added Changelog for 
v5.5.0rc3 --- .bumpversion.cfg | 2 +- Changelog.rst | 138 +++++++++++++++++++++++++++++++++ README.rst | 2 +- celery/__init__.py | 2 +- docs/includes/introduction.txt | 2 +- 5 files changed, 142 insertions(+), 4 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 0ab9df2c382..058290c3d76 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 5.5.0rc2 +current_version = 5.5.0rc3 commit = True tag = True parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P[a-z\d]+)? diff --git a/Changelog.rst b/Changelog.rst index 56bb0880f31..4190f64a055 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -8,6 +8,144 @@ This document contains change notes for bugfix & new features in the main branch & 5.5.x series, please see :ref:`whatsnew-5.5` for an overview of what's new in Celery 5.5. +.. _version-5.5.0rc3: + +5.5.0rc3 +======== + +:release-date: 2024-12-03 +:release-by: Tomer Nosrati + +Celery v5.5.0 Release Candidate 3 is now available for testing. +Please help us test this version and report any issues. + +Key Highlights +~~~~~~~~~~~~~~ + +See :ref:`whatsnew-5.5` or read the main highlights below. + +Using Kombu 5.5.0rc2 +-------------------- + +The minimum required Kombu version has been bumped to 5.5.0. +Kombu is current at 5.5.0rc2. + +Complete Quorum Queues Support +------------------------------ + +A completely new ETA mechanism was developed to allow full support with RabbitMQ Quorum Queues. + +After upgrading to this version, please share your feedback on the quorum queues support. + +Relevant Issues: +`#9207 `_, +`#6067 `_ + +- New :ref:`documentation `. +- New :setting:`broker_native_delayed_delivery_queue_type` configuration option. + +New support for Google Pub/Sub transport +---------------------------------------- + +After upgrading to this version, please share your feedback on the Google Pub/Sub transport support. + +Relevant Issues: +`#9351 `_ + +Python 3.13 Improved Support +---------------------------- + +Additional dependencies have been migrated successfully to Python 3.13, including Kombu and py-amqp. + +Soft Shutdown +------------- + +The soft shutdown is a new mechanism in Celery that sits between the warm shutdown and the cold shutdown. +It sets a time limited "warm shutdown" period, during which the worker will continue to process tasks that are already running. +After the soft shutdown ends, the worker will initiate a graceful cold shutdown, stopping all tasks and exiting. + +The soft shutdown is disabled by default, and can be enabled by setting the new configuration option :setting:`worker_soft_shutdown_timeout`. +If a worker is not running any task when the soft shutdown initiates, it will skip the warm shutdown period and proceed directly to the cold shutdown +unless the new configuration option :setting:`worker_enable_soft_shutdown_on_idle` is set to True. This is useful for workers +that are idle, waiting on ETA tasks to be executed that still want to enable the soft shutdown anyways. + +The soft shutdown can replace the cold shutdown when using a broker with a visibility timeout mechanism, like :ref:`Redis ` +or :ref:`SQS `, to enable a more graceful cold shutdown procedure, allowing the worker enough time to re-queue tasks that were not +completed (e.g., ``Restoring 1 unacknowledged message(s)``) by resetting the visibility timeout of the unacknowledged messages just before +the worker exits completely. + +After upgrading to this version, please share your feedback on the new Soft Shutdown mechanism. 
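For illustration, a minimal configuration sketch enabling the soft shutdown described
above, assuming ``app`` is an existing Celery application instance (only the two
setting names come from these notes; the values are assumptions):

.. code-block:: python

    # Illustrative values only: a ten-second "warm" grace period before
    # the worker falls back to a cold shutdown, applied even when idle.
    app.conf.worker_soft_shutdown_timeout = 10.0
    app.conf.worker_enable_soft_shutdown_on_idle = True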
+ +Relevant Issues: +`#9213 `_, +`#9231 `_, +`#9238 `_ + +- New :ref:`documentation ` for each shutdown type. +- New :setting:`worker_soft_shutdown_timeout` configuration option. +- New :setting:`worker_enable_soft_shutdown_on_idle` configuration option. + +REMAP_SIGTERM +------------- + +The ``REMAP_SIGTERM`` "hidden feature" has been tested, :ref:`documented ` and is now officially supported. +This feature allows users to remap the SIGTERM signal to SIGQUIT, to initiate a soft or a cold shutdown using :sig:`TERM` +instead of :sig:`QUIT`. + +Pydantic Support +---------------- + +This release introduces support for Pydantic models in Celery tasks. +For more info, see the new pydantic example and PR `#9023 `_ by @mathiasertl. + +After upgrading to this version, please share your feedback on the new Pydantic support. + +Redis Broker Stability Improvements +----------------------------------- +The root cause of the Redis broker instability issue has been `identified and resolved `_ +in the v5.4.0 release of Kombu, which should resolve the disconnections bug and offer additional improvements. + +After upgrading to this version, please share your feedback on the Redis broker stability. + +Relevant Issues: +`#7276 `_, +`#8091 `_, +`#8030 `_, +`#8384 `_ + +Quorum Queues Initial Support +----------------------------- +This release introduces the initial support for Quorum Queues with Celery. + +See new configuration options for more details: + +- :setting:`task_default_queue_type` +- :setting:`worker_detect_quorum_queues` + +After upgrading to this version, please share your feedback on the Quorum Queues support. + +Relevant Issues: +`#6067 `_, +`#9121 `_ + +What's Changed +~~~~~~~~~~~~~~ + +- Document usage of broker_native_delayed_delivery_queue_type (#9419) +- Adjust section in what's new document regarding quorum queues support (#9420) +- Update pytest-rerunfailures to 15.0 (#9422) +- Document group unrolling (#9421) +- fix small typo acces -> access (#9434) +- Update cryptography to 44.0.0 (#9437) +- Added pypy to Dockerfile (#9438) +- Skipped flaky tests on pypy (all pass after ~10 reruns) (#9439) +- Allowing managed credentials for azureblockblob (#9430) +- Allow passing Celery objects to the Click entry point (#9426) +- support Request termination for gevent (#9440) +- Prevent event_mask from being overwritten. (#9432) +- Update pytest to 8.3.4 (#9444) +- Prepare for (pre) release: v5.5.0rc3 (#9450) + .. 
_version-5.5.0rc2: 5.5.0rc2 diff --git a/README.rst b/README.rst index bab1e57cbe8..bc9c862325d 100644 --- a/README.rst +++ b/README.rst @@ -2,7 +2,7 @@ |build-status| |coverage| |license| |wheel| |semgrep| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge| -:Version: 5.5.0rc2 (immunity) +:Version: 5.5.0rc3 (immunity) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ diff --git a/celery/__init__.py b/celery/__init__.py index 9794597fd52..276fba07c8f 100644 --- a/celery/__init__.py +++ b/celery/__init__.py @@ -17,7 +17,7 @@ SERIES = 'immunity' -__version__ = '5.5.0rc2' +__version__ = '5.5.0rc3' __author__ = 'Ask Solem' __contact__ = 'auvipy@gmail.com' __homepage__ = 'https://docs.celeryq.dev/' diff --git a/docs/includes/introduction.txt b/docs/includes/introduction.txt index f2cca8f3b52..bdb55b41b22 100644 --- a/docs/includes/introduction.txt +++ b/docs/includes/introduction.txt @@ -1,4 +1,4 @@ -:Version: 5.5.0rc2 (immunity) +:Version: 5.5.0rc3 (immunity) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ From 3baea167a301ca4d2d6163ff44574736586cdb53 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 12 Dec 2024 20:55:40 +0200 Subject: [PATCH 0916/1051] Bugfix: SIGQUIT not initiating cold shutdown when `task_acks_late=False` (#9461) --- celery/worker/consumer/consumer.py | 15 +++++++++++++-- t/unit/worker/test_consumer.py | 3 +-- 2 files changed, 14 insertions(+), 4 deletions(-) diff --git a/celery/worker/consumer/consumer.py b/celery/worker/consumer/consumer.py index a66f5443872..3e6a66df532 100644 --- a/celery/worker/consumer/consumer.py +++ b/celery/worker/consumer/consumer.py @@ -734,10 +734,21 @@ def __repr__(self): ) def cancel_all_unacked_requests(self): - """Cancel all unacked requests with late acknowledgement enabled.""" + """Cancel all active requests that either do not require late acknowledgments or, + if they do, have not been acknowledged yet. + """ def should_cancel(request): - return request.task.acks_late and not request.acknowledged + if not request.task.acks_late: + # Task does not require late acknowledgment, cancel it. + return True + + if not request.acknowledged: + # Task is late acknowledged, but it has not been acknowledged yet, cancel it. + return True + + # Task is late acknowledged, but it has already been acknowledged. + return False # Do not cancel and allow it to gracefully finish as it has already been acknowledged. 
requests_to_cancel = tuple(filter(should_cancel, active_requests)) diff --git a/t/unit/worker/test_consumer.py b/t/unit/worker/test_consumer.py index 3523e18056d..04d167e3d83 100644 --- a/t/unit/worker/test_consumer.py +++ b/t/unit/worker/test_consumer.py @@ -460,7 +460,6 @@ def test_cancel_all_unacked_requests(self): mock_request_acks_late_acknowledged.acknowledged = True mock_request_acks_early = Mock(id='3') mock_request_acks_early.task.acks_late = False - mock_request_acks_early.acknowledged = False active_requests.add(mock_request_acks_late_not_acknowledged) active_requests.add(mock_request_acks_late_acknowledged) @@ -470,7 +469,7 @@ def test_cancel_all_unacked_requests(self): mock_request_acks_late_not_acknowledged.cancel.assert_called_once_with(c.pool) mock_request_acks_late_acknowledged.cancel.assert_not_called() - mock_request_acks_early.cancel.assert_not_called() + mock_request_acks_early.cancel.assert_called_once_with(c.pool) active_requests.clear() From f0ff79cff6a8794174a6e2e189ed1431257b6406 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 17 Dec 2024 19:04:26 +0200 Subject: [PATCH 0917/1051] Fixed pycurl dep with Python 3.8 (#9471) --- requirements/extras/sqs.txt | 3 ++- requirements/test-ci-default.txt | 4 ++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/requirements/extras/sqs.txt b/requirements/extras/sqs.txt index 03d1687cfcd..43ee109e8c6 100644 --- a/requirements/extras/sqs.txt +++ b/requirements/extras/sqs.txt @@ -1,4 +1,5 @@ boto3>=1.26.143 -pycurl>=7.43.0.5; sys_platform != 'win32' and platform_python_implementation=="CPython" +pycurl>=7.43.0.5,<7.45.4; sys_platform != 'win32' and platform_python_implementation=="CPython" and python_version < "3.9" +pycurl>=7.45.4; sys_platform != 'win32' and platform_python_implementation=="CPython" and python_version >= "3.9" urllib3>=1.26.16 kombu[sqs]>=5.3.4 diff --git a/requirements/test-ci-default.txt b/requirements/test-ci-default.txt index 0ab2b79da06..78994fa8e45 100644 --- a/requirements/test-ci-default.txt +++ b/requirements/test-ci-default.txt @@ -21,5 +21,5 @@ git+https://github.com/celery/kombu.git # SQS dependencies other than boto -pycurl>=7.43.0.5; sys_platform != 'win32' and platform_python_implementation=="CPython" - +pycurl>=7.43.0.5,<7.45.4; sys_platform != 'win32' and platform_python_implementation=="CPython" and python_version < "3.9" +pycurl>=7.45.4; sys_platform != 'win32' and platform_python_implementation=="CPython" and python_version >= "3.9" From d9e4c8c6c7d236b2ed2ca8a121c283325eb9cfaa Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 17 Dec 2024 19:49:43 +0200 Subject: [PATCH 0918/1051] Update elasticsearch requirement from <=8.16.0 to <=8.17.0 (#9469) Updates the requirements on [elasticsearch](https://github.com/elastic/elasticsearch-py) to permit the latest version. - [Release notes](https://github.com/elastic/elasticsearch-py/releases) - [Commits](https://github.com/elastic/elasticsearch-py/compare/0.4.1...v8.17.0) --- updated-dependencies: - dependency-name: elasticsearch dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/extras/elasticsearch.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/elasticsearch.txt b/requirements/extras/elasticsearch.txt index 269031ec71e..a729c4ae794 100644 --- a/requirements/extras/elasticsearch.txt +++ b/requirements/extras/elasticsearch.txt @@ -1,2 +1,2 @@ -elasticsearch<=8.16.0 +elasticsearch<=8.17.0 elastic-transport<=8.15.1 From 49f8f712f2e395ab32244b231d319e2484efea7e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 17 Dec 2024 20:55:41 +0200 Subject: [PATCH 0919/1051] Bump pytest-subtests from 0.13.1 to 0.14.1 (#9459) * Bump pytest-subtests from 0.13.1 to 0.14.1 Bumps [pytest-subtests](https://github.com/pytest-dev/pytest-subtests) from 0.13.1 to 0.14.1. - [Release notes](https://github.com/pytest-dev/pytest-subtests/releases) - [Changelog](https://github.com/pytest-dev/pytest-subtests/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest-subtests/compare/v0.13.1...v0.14.1) --- updated-dependencies: - dependency-name: pytest-subtests dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Update requirements/test.txt --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Tomer Nosrati --- requirements/test.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index f61d5f7e661..ca0b8da0610 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -2,7 +2,8 @@ pytest==8.3.4 pytest-celery[all]>=1.1.3 pytest-rerunfailures>=14.0,<15.0; python_version >= "3.8" and python_version < "3.9" pytest-rerunfailures>=15.0; python_version >= "3.9" and python_version < "4.0" -pytest-subtests==0.13.1 +pytest-subtests<0.14.0; python_version < "3.9" +pytest-subtests>=0.14.1; python_version >= "3.9" pytest-timeout==2.3.1 pytest-click==1.1.0 pytest-order==1.3.0 From 9b10ed6db01f5187f8ef0f56442c5eed512edb5e Mon Sep 17 00:00:00 2001 From: Avamander Date: Thu, 19 Dec 2024 02:43:36 +0200 Subject: [PATCH 0920/1051] Added a type annotation to the periodic task example (#9473) --- docs/userguide/periodic-tasks.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/userguide/periodic-tasks.rst b/docs/userguide/periodic-tasks.rst index 1928b1f9ac3..c185115e628 100644 --- a/docs/userguide/periodic-tasks.rst +++ b/docs/userguide/periodic-tasks.rst @@ -90,7 +90,7 @@ beat schedule list. app = Celery() @app.on_after_configure.connect - def setup_periodic_tasks(sender, **kwargs): + def setup_periodic_tasks(sender: Celery, **kwargs): # Calls test('hello') every 10 seconds. 
sender.add_periodic_task(10.0, test.s('hello'), name='add every 10') From 9ad7d54a25b456111bbce105ed7c654c8ff42263 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 19 Dec 2024 17:08:22 +0200 Subject: [PATCH 0921/1051] Prepare for (pre) release: v5.5.0rc4 (#9474) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Bump version: 5.5.0rc3 → 5.5.0rc4 * Added Changelog for v5.5.0rc4 --- .bumpversion.cfg | 2 +- Changelog.rst | 130 ++++++++++++++++ README.rst | 2 +- celery/__init__.py | 2 +- docs/history/changelog-5.5.rst | 268 +++++++++++++++++++++++++++++++++ docs/includes/introduction.txt | 2 +- 6 files changed, 402 insertions(+), 4 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 058290c3d76..149c341155a 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 5.5.0rc3 +current_version = 5.5.0rc4 commit = True tag = True parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P[a-z\d]+)? diff --git a/Changelog.rst b/Changelog.rst index 4190f64a055..9357c597f9c 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -8,6 +8,136 @@ This document contains change notes for bugfix & new features in the main branch & 5.5.x series, please see :ref:`whatsnew-5.5` for an overview of what's new in Celery 5.5. +.. _version-5.5.0rc4: + +5.5.0rc4 +======== + +:release-date: 2024-12-19 +:release-by: Tomer Nosrati + +Celery v5.5.0 Release Candidate 4 is now available for testing. +Please help us test this version and report any issues. + +Key Highlights +~~~~~~~~~~~~~~ + +See :ref:`whatsnew-5.5` or read the main highlights below. + +Using Kombu 5.5.0rc2 +-------------------- + +The minimum required Kombu version has been bumped to 5.5.0. +Kombu is current at 5.5.0rc2. + +Complete Quorum Queues Support +------------------------------ + +A completely new ETA mechanism was developed to allow full support with RabbitMQ Quorum Queues. + +After upgrading to this version, please share your feedback on the quorum queues support. + +Relevant Issues: +`#9207 `_, +`#6067 `_ + +- New :ref:`documentation `. +- New :setting:`broker_native_delayed_delivery_queue_type` configuration option. + +New support for Google Pub/Sub transport +---------------------------------------- + +After upgrading to this version, please share your feedback on the Google Pub/Sub transport support. + +Relevant Issues: +`#9351 `_ + +Python 3.13 Improved Support +---------------------------- + +Additional dependencies have been migrated successfully to Python 3.13, including Kombu and py-amqp. + +Soft Shutdown +------------- + +The soft shutdown is a new mechanism in Celery that sits between the warm shutdown and the cold shutdown. +It sets a time limited "warm shutdown" period, during which the worker will continue to process tasks that are already running. +After the soft shutdown ends, the worker will initiate a graceful cold shutdown, stopping all tasks and exiting. + +The soft shutdown is disabled by default, and can be enabled by setting the new configuration option :setting:`worker_soft_shutdown_timeout`. +If a worker is not running any task when the soft shutdown initiates, it will skip the warm shutdown period and proceed directly to the cold shutdown +unless the new configuration option :setting:`worker_enable_soft_shutdown_on_idle` is set to True. This is useful for workers +that are idle, waiting on ETA tasks to be executed that still want to enable the soft shutdown anyways. 
+ +The soft shutdown can replace the cold shutdown when using a broker with a visibility timeout mechanism, like :ref:`Redis ` +or :ref:`SQS `, to enable a more graceful cold shutdown procedure, allowing the worker enough time to re-queue tasks that were not +completed (e.g., ``Restoring 1 unacknowledged message(s)``) by resetting the visibility timeout of the unacknowledged messages just before +the worker exits completely. + +After upgrading to this version, please share your feedback on the new Soft Shutdown mechanism. + +Relevant Issues: +`#9213 `_, +`#9231 `_, +`#9238 `_ + +- New :ref:`documentation ` for each shutdown type. +- New :setting:`worker_soft_shutdown_timeout` configuration option. +- New :setting:`worker_enable_soft_shutdown_on_idle` configuration option. + +REMAP_SIGTERM +------------- + +The ``REMAP_SIGTERM`` "hidden feature" has been tested, :ref:`documented ` and is now officially supported. +This feature allows users to remap the SIGTERM signal to SIGQUIT, to initiate a soft or a cold shutdown using :sig:`TERM` +instead of :sig:`QUIT`. + +Pydantic Support +---------------- + +This release introduces support for Pydantic models in Celery tasks. +For more info, see the new pydantic example and PR `#9023 `_ by @mathiasertl. + +After upgrading to this version, please share your feedback on the new Pydantic support. + +Redis Broker Stability Improvements +----------------------------------- +The root cause of the Redis broker instability issue has been `identified and resolved `_ +in the v5.4.0 release of Kombu, which should resolve the disconnections bug and offer additional improvements. + +After upgrading to this version, please share your feedback on the Redis broker stability. + +Relevant Issues: +`#7276 `_, +`#8091 `_, +`#8030 `_, +`#8384 `_ + +Quorum Queues Initial Support +----------------------------- +This release introduces the initial support for Quorum Queues with Celery. + +See new configuration options for more details: + +- :setting:`task_default_queue_type` +- :setting:`worker_detect_quorum_queues` + +After upgrading to this version, please share your feedback on the Quorum Queues support. + +Relevant Issues: +`#6067 `_, +`#9121 `_ + +What's Changed +~~~~~~~~~~~~~~ + +- Bugfix: SIGQUIT not initiating cold shutdown when `task_acks_late=False` (#9461) +- Fixed pycurl dep with Python 3.8 (#9471) +- Update elasticsearch requirement from <=8.16.0 to <=8.17.0 (#9469) +- Bump pytest-subtests from 0.13.1 to 0.14.1 (#9459) +- documentation: Added a type annotation to the periodic task example (#9473) +- Prepare for (pre) release: v5.5.0rc4 (#9474) + .. 
_version-5.5.0rc3: 5.5.0rc3 diff --git a/README.rst b/README.rst index bc9c862325d..1acac3a69fd 100644 --- a/README.rst +++ b/README.rst @@ -2,7 +2,7 @@ |build-status| |coverage| |license| |wheel| |semgrep| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge| -:Version: 5.5.0rc3 (immunity) +:Version: 5.5.0rc4 (immunity) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ diff --git a/celery/__init__.py b/celery/__init__.py index 276fba07c8f..0557678fc68 100644 --- a/celery/__init__.py +++ b/celery/__init__.py @@ -17,7 +17,7 @@ SERIES = 'immunity' -__version__ = '5.5.0rc3' +__version__ = '5.5.0rc4' __author__ = 'Ask Solem' __contact__ = 'auvipy@gmail.com' __homepage__ = 'https://docs.celeryq.dev/' diff --git a/docs/history/changelog-5.5.rst b/docs/history/changelog-5.5.rst index 819d009ac51..e62f3997dbe 100644 --- a/docs/history/changelog-5.5.rst +++ b/docs/history/changelog-5.5.rst @@ -8,6 +8,274 @@ This document contains change notes for bugfix & new features in the main branch & 5.5.x series, please see :ref:`whatsnew-5.5` for an overview of what's new in Celery 5.5. +.. _version-5.5.0rc4: + +5.5.0rc4 +======== + +:release-date: 2024-12-19 +:release-by: Tomer Nosrati + +Celery v5.5.0 Release Candidate 4 is now available for testing. +Please help us test this version and report any issues. + +Key Highlights +~~~~~~~~~~~~~~ + +See :ref:`whatsnew-5.5` or read the main highlights below. + +Using Kombu 5.5.0rc2 +-------------------- + +The minimum required Kombu version has been bumped to 5.5.0. +Kombu is current at 5.5.0rc2. + +Complete Quorum Queues Support +------------------------------ + +A completely new ETA mechanism was developed to allow full support with RabbitMQ Quorum Queues. + +After upgrading to this version, please share your feedback on the quorum queues support. + +Relevant Issues: +`#9207 `_, +`#6067 `_ + +- New :ref:`documentation `. +- New :setting:`broker_native_delayed_delivery_queue_type` configuration option. + +New support for Google Pub/Sub transport +---------------------------------------- + +After upgrading to this version, please share your feedback on the Google Pub/Sub transport support. + +Relevant Issues: +`#9351 `_ + +Python 3.13 Improved Support +---------------------------- + +Additional dependencies have been migrated successfully to Python 3.13, including Kombu and py-amqp. + +Soft Shutdown +------------- + +The soft shutdown is a new mechanism in Celery that sits between the warm shutdown and the cold shutdown. +It sets a time limited "warm shutdown" period, during which the worker will continue to process tasks that are already running. +After the soft shutdown ends, the worker will initiate a graceful cold shutdown, stopping all tasks and exiting. + +The soft shutdown is disabled by default, and can be enabled by setting the new configuration option :setting:`worker_soft_shutdown_timeout`. +If a worker is not running any task when the soft shutdown initiates, it will skip the warm shutdown period and proceed directly to the cold shutdown +unless the new configuration option :setting:`worker_enable_soft_shutdown_on_idle` is set to True. This is useful for workers +that are idle, waiting on ETA tasks to be executed that still want to enable the soft shutdown anyways. 
+ +The soft shutdown can replace the cold shutdown when using a broker with a visibility timeout mechanism, like :ref:`Redis ` +or :ref:`SQS `, to enable a more graceful cold shutdown procedure, allowing the worker enough time to re-queue tasks that were not +completed (e.g., ``Restoring 1 unacknowledged message(s)``) by resetting the visibility timeout of the unacknowledged messages just before +the worker exits completely. + +After upgrading to this version, please share your feedback on the new Soft Shutdown mechanism. + +Relevant Issues: +`#9213 `_, +`#9231 `_, +`#9238 `_ + +- New :ref:`documentation ` for each shutdown type. +- New :setting:`worker_soft_shutdown_timeout` configuration option. +- New :setting:`worker_enable_soft_shutdown_on_idle` configuration option. + +REMAP_SIGTERM +------------- + +The ``REMAP_SIGTERM`` "hidden feature" has been tested, :ref:`documented ` and is now officially supported. +This feature allows users to remap the SIGTERM signal to SIGQUIT, to initiate a soft or a cold shutdown using :sig:`TERM` +instead of :sig:`QUIT`. + +Pydantic Support +---------------- + +This release introduces support for Pydantic models in Celery tasks. +For more info, see the new pydantic example and PR `#9023 `_ by @mathiasertl. + +After upgrading to this version, please share your feedback on the new Pydantic support. + +Redis Broker Stability Improvements +----------------------------------- +The root cause of the Redis broker instability issue has been `identified and resolved `_ +in the v5.4.0 release of Kombu, which should resolve the disconnections bug and offer additional improvements. + +After upgrading to this version, please share your feedback on the Redis broker stability. + +Relevant Issues: +`#7276 `_, +`#8091 `_, +`#8030 `_, +`#8384 `_ + +Quorum Queues Initial Support +----------------------------- +This release introduces the initial support for Quorum Queues with Celery. + +See new configuration options for more details: + +- :setting:`task_default_queue_type` +- :setting:`worker_detect_quorum_queues` + +After upgrading to this version, please share your feedback on the Quorum Queues support. + +Relevant Issues: +`#6067 `_, +`#9121 `_ + +What's Changed +~~~~~~~~~~~~~~ + +- Bugfix: SIGQUIT not initiating cold shutdown when `task_acks_late=False` (#9461) +- Fixed pycurl dep with Python 3.8 (#9471) +- Update elasticsearch requirement from <=8.16.0 to <=8.17.0 (#9469) +- Bump pytest-subtests from 0.13.1 to 0.14.1 (#9459) +- documentation: Added a type annotation to the periodic task example (#9473) +- Prepare for (pre) release: v5.5.0rc4 (#9474) + +.. _version-5.5.0rc3: + +5.5.0rc3 +======== + +:release-date: 2024-12-03 +:release-by: Tomer Nosrati + +Celery v5.5.0 Release Candidate 3 is now available for testing. +Please help us test this version and report any issues. + +Key Highlights +~~~~~~~~~~~~~~ + +See :ref:`whatsnew-5.5` or read the main highlights below. + +Using Kombu 5.5.0rc2 +-------------------- + +The minimum required Kombu version has been bumped to 5.5.0. +Kombu is current at 5.5.0rc2. + +Complete Quorum Queues Support +------------------------------ + +A completely new ETA mechanism was developed to allow full support with RabbitMQ Quorum Queues. + +After upgrading to this version, please share your feedback on the quorum queues support. + +Relevant Issues: +`#9207 `_, +`#6067 `_ + +- New :ref:`documentation `. +- New :setting:`broker_native_delayed_delivery_queue_type` configuration option. 
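For illustration, and following the rabbitmq.rst snippet added earlier in this series
(the Native Delayed Delivery queues default to the quorum type), a hedged sketch of
opting back into classic queues:

.. code-block:: python

    # Sketch only; 'classic' is the value named in the rabbitmq.rst change above.
    app.conf.broker_native_delayed_delivery_queue_type = 'classic'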
+ +New support for Google Pub/Sub transport +---------------------------------------- + +After upgrading to this version, please share your feedback on the Google Pub/Sub transport support. + +Relevant Issues: +`#9351 `_ + +Python 3.13 Improved Support +---------------------------- + +Additional dependencies have been migrated successfully to Python 3.13, including Kombu and py-amqp. + +Soft Shutdown +------------- + +The soft shutdown is a new mechanism in Celery that sits between the warm shutdown and the cold shutdown. +It sets a time limited "warm shutdown" period, during which the worker will continue to process tasks that are already running. +After the soft shutdown ends, the worker will initiate a graceful cold shutdown, stopping all tasks and exiting. + +The soft shutdown is disabled by default, and can be enabled by setting the new configuration option :setting:`worker_soft_shutdown_timeout`. +If a worker is not running any task when the soft shutdown initiates, it will skip the warm shutdown period and proceed directly to the cold shutdown +unless the new configuration option :setting:`worker_enable_soft_shutdown_on_idle` is set to True. This is useful for workers +that are idle, waiting on ETA tasks to be executed that still want to enable the soft shutdown anyways. + +The soft shutdown can replace the cold shutdown when using a broker with a visibility timeout mechanism, like :ref:`Redis ` +or :ref:`SQS `, to enable a more graceful cold shutdown procedure, allowing the worker enough time to re-queue tasks that were not +completed (e.g., ``Restoring 1 unacknowledged message(s)``) by resetting the visibility timeout of the unacknowledged messages just before +the worker exits completely. + +After upgrading to this version, please share your feedback on the new Soft Shutdown mechanism. + +Relevant Issues: +`#9213 `_, +`#9231 `_, +`#9238 `_ + +- New :ref:`documentation ` for each shutdown type. +- New :setting:`worker_soft_shutdown_timeout` configuration option. +- New :setting:`worker_enable_soft_shutdown_on_idle` configuration option. + +REMAP_SIGTERM +------------- + +The ``REMAP_SIGTERM`` "hidden feature" has been tested, :ref:`documented ` and is now officially supported. +This feature allows users to remap the SIGTERM signal to SIGQUIT, to initiate a soft or a cold shutdown using :sig:`TERM` +instead of :sig:`QUIT`. + +Pydantic Support +---------------- + +This release introduces support for Pydantic models in Celery tasks. +For more info, see the new pydantic example and PR `#9023 `_ by @mathiasertl. + +After upgrading to this version, please share your feedback on the new Pydantic support. + +Redis Broker Stability Improvements +----------------------------------- +The root cause of the Redis broker instability issue has been `identified and resolved `_ +in the v5.4.0 release of Kombu, which should resolve the disconnections bug and offer additional improvements. + +After upgrading to this version, please share your feedback on the Redis broker stability. + +Relevant Issues: +`#7276 `_, +`#8091 `_, +`#8030 `_, +`#8384 `_ + +Quorum Queues Initial Support +----------------------------- +This release introduces the initial support for Quorum Queues with Celery. + +See new configuration options for more details: + +- :setting:`task_default_queue_type` +- :setting:`worker_detect_quorum_queues` + +After upgrading to this version, please share your feedback on the Quorum Queues support. 
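A short sketch of opting a worker into quorum queues with the two settings listed
above (the ``'quorum'`` value is an assumption inferred from the setting name):

.. code-block:: python

    # Assumed values for illustration; only the setting names are taken
    # from the release notes above.
    app.conf.task_default_queue_type = 'quorum'
    app.conf.worker_detect_quorum_queues = True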
+ +Relevant Issues: +`#6067 `_, +`#9121 `_ + +What's Changed +~~~~~~~~~~~~~~ + +- Document usage of broker_native_delayed_delivery_queue_type (#9419) +- Adjust section in what's new document regarding quorum queues support (#9420) +- Update pytest-rerunfailures to 15.0 (#9422) +- Document group unrolling (#9421) +- fix small typo acces -> access (#9434) +- Update cryptography to 44.0.0 (#9437) +- Added pypy to Dockerfile (#9438) +- Skipped flaky tests on pypy (all pass after ~10 reruns) (#9439) +- Allowing managed credentials for azureblockblob (#9430) +- Allow passing Celery objects to the Click entry point (#9426) +- support Request termination for gevent (#9440) +- Prevent event_mask from being overwritten. (#9432) +- Update pytest to 8.3.4 (#9444) +- Prepare for (pre) release: v5.5.0rc3 (#9450) + .. _version-5.5.0rc2: 5.5.0rc2 diff --git a/docs/includes/introduction.txt b/docs/includes/introduction.txt index bdb55b41b22..5bc0021d226 100644 --- a/docs/includes/introduction.txt +++ b/docs/includes/introduction.txt @@ -1,4 +1,4 @@ -:Version: 5.5.0rc3 (immunity) +:Version: 5.5.0rc4 (immunity) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ From 1129272c3264b6c3e152e699b4a3ef49d185f2c8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 21 Dec 2024 04:48:17 +0200 Subject: [PATCH 0922/1051] Bump mypy from 1.13.0 to 1.14.0 (#9476) Bumps [mypy](https://github.com/python/mypy) from 1.13.0 to 1.14.0. - [Changelog](https://github.com/python/mypy/blob/master/CHANGELOG.md) - [Commits](https://github.com/python/mypy/compare/v1.13.0...v1.14.0) --- updated-dependencies: - dependency-name: mypy dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index ca0b8da0610..4a8adf99fab 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -10,7 +10,7 @@ pytest-order==1.3.0 boto3>=1.26.143 moto>=4.1.11,<5.1.0 # typing extensions -mypy==1.13.0; platform_python_implementation=="CPython" +mypy==1.14.0; platform_python_implementation=="CPython" pre-commit>=3.5.0,<3.8.0; python_version < '3.9' pre-commit>=4.0.1; python_version >= '3.9' -r extras/yaml.txt From 1fef0cc4add1c771f172e5c2bfefc8a0701ab157 Mon Sep 17 00:00:00 2001 From: kairi Date: Mon, 23 Dec 2024 22:30:40 +0900 Subject: [PATCH 0923/1051] Fix cassandra backend port settings not working (#9465) * fix: cassandra port is forced to default value * test: add assertions for CassandraBackend port configuration * fix: use default port even if cassandra_port is set to None in config * fix: set cassandra_port to None to ensure default port usage in test_options --- celery/backends/cassandra.py | 4 ++-- t/unit/backends/test_cassandra.py | 10 +++++++++- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/celery/backends/cassandra.py b/celery/backends/cassandra.py index 0eb37f31ba8..4ca071d2d03 100644 --- a/celery/backends/cassandra.py +++ b/celery/backends/cassandra.py @@ -86,7 +86,7 @@ class CassandraBackend(BaseBackend): supports_autoexpire = True # autoexpire supported via entry_ttl def __init__(self, servers=None, keyspace=None, table=None, entry_ttl=None, - port=9042, bundle_path=None, **kwargs): + port=None, bundle_path=None, **kwargs): super().__init__(**kwargs) if not cassandra: @@ -96,7 +96,7 @@ def __init__(self, servers=None, keyspace=None, table=None, entry_ttl=None, self.servers = servers or conf.get('cassandra_servers', None) self.bundle_path = bundle_path or conf.get( 'cassandra_secure_bundle_path', None) - self.port = port or conf.get('cassandra_port', None) + self.port = port or conf.get('cassandra_port', None) or 9042 self.keyspace = keyspace or conf.get('cassandra_keyspace', None) self.table = table or conf.get('cassandra_table', None) self.cassandra_options = conf.get('cassandra_options', {}) diff --git a/t/unit/backends/test_cassandra.py b/t/unit/backends/test_cassandra.py index 9bf8a480f3d..b51b51d056c 100644 --- a/t/unit/backends/test_cassandra.py +++ b/t/unit/backends/test_cassandra.py @@ -267,4 +267,12 @@ def test_options(self): 'cql_version': '3.2.1', 'protocol_version': 3 } - mod.CassandraBackend(app=self.app) + self.app.conf.cassandra_port = None + x = mod.CassandraBackend(app=self.app) + # Default port is 9042 + assert x.port == 9042 + + # Valid options with port specified + self.app.conf.cassandra_port = 1234 + x = mod.CassandraBackend(app=self.app) + assert x.port == 1234 From 40c7e9b6620fa6ccee0d744b3cb7b96cb5e73e61 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 23 Dec 2024 19:12:41 +0200 Subject: [PATCH 0924/1051] [pre-commit.ci] pre-commit autoupdate (#9478) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/asottile/pyupgrade: v3.19.0 → v3.19.1](https://github.com/asottile/pyupgrade/compare/v3.19.0...v3.19.1) - [github.com/pre-commit/mirrors-mypy: v1.13.0 → v1.14.0](https://github.com/pre-commit/mirrors-mypy/compare/v1.13.0...v1.14.0) Co-authored-by: 
pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 779461c2657..15abbf127e9 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/asottile/pyupgrade - rev: v3.19.0 + rev: v3.19.1 hooks: - id: pyupgrade args: ["--py38-plus"] @@ -39,7 +39,7 @@ repos: - id: isort - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.13.0 + rev: v1.14.0 hooks: - id: mypy pass_filenames: false From 3ae15c1dfc80c102aed05ad9c1d38470218b2fc8 Mon Sep 17 00:00:00 2001 From: Omer Katz Date: Wed, 25 Dec 2024 17:12:21 +0200 Subject: [PATCH 0925/1051] Unroll group when chaining a group with a single item to another signature when using the | operator. (#9456) Add documentation. Fix tests. --- celery/canvas.py | 4 ++++ docs/userguide/canvas.rst | 22 ++++++++++++++++++++++ t/unit/tasks/test_canvas.py | 25 +++++++++++++++++++++++-- t/unit/tasks/test_tasks.py | 2 +- 4 files changed, 50 insertions(+), 3 deletions(-) diff --git a/celery/canvas.py b/celery/canvas.py index 9f4d2f0ce74..748445f7a27 100644 --- a/celery/canvas.py +++ b/celery/canvas.py @@ -1583,6 +1583,10 @@ def __call__(self, *partial_args, **options): def __or__(self, other): # group() | task -> chord + # If the group is unrolled, return a chain instead + g = maybe_unroll_group(self) + if not isinstance(g, group): + return g | other return chord(self, body=other, app=self._app) def skew(self, start=1.0, stop=None, step=1.0): diff --git a/docs/userguide/canvas.rst b/docs/userguide/canvas.rst index 8b74e38b955..c701bdc39f7 100644 --- a/docs/userguide/canvas.rst +++ b/docs/userguide/canvas.rst @@ -859,6 +859,28 @@ if you plan to use it as part of a larger canvas. >>> chain(group(add.s(1, 1)), add.s(2)) add(1, 1) | add(2) +.. warning:: + + .. versionadded:: 5.5 + + Before Celery 5.5 the following group would be upgraded to a chord instead of being unrolled: + + .. code-block:: pycon + + >>> from celery import chain, group + >>> from tasks import add + >>> group(add.s(1, 1)) | add.s(2) + %add([add(1, 1)], 2) + + This was fixed in Celery 5.5 and now the group is correctly unrolled into a single signature. + + .. code-block:: pycon + + >>> from celery import chain, group + >>> from tasks import add + >>> group(add.s(1, 1)) | add.s(2) + add(1, 1) | add(2) + .. 
_canvas-chord: Chords diff --git a/t/unit/tasks/test_canvas.py b/t/unit/tasks/test_canvas.py index 1f901376205..224f8ca7465 100644 --- a/t/unit/tasks/test_canvas.py +++ b/t/unit/tasks/test_canvas.py @@ -564,7 +564,7 @@ def test_chain_of_chord__or__group_of_single_task(self): assert isinstance(new_chain.tasks[0].body, _chain) def test_chain_of_chord_upgrade_on_chaining(self): - c = chord([signature('header')], group(signature('body'))) + c = chord([signature('header')], group(signature('body'), signature('body2'))) c = chain(c) t = signature('t') new_chain = c | t # t should be chained with the body of c[0] and create a new chord @@ -1251,6 +1251,19 @@ def test_group_prepared(self): assert isinstance(result, AsyncResult) assert group_id is not None + def test_group_unroll(self, subtests): + @self.app.task + def test_task(a, b): + return + + with subtests.test("single item"): + c = group(test_task.s(1, 2)) | test_task.s(1) + assert str(c) == "t.unit.tasks.test_canvas.test_task(1, 2) | test_task(1)" + + with subtests.test("regen"): + c = group(test_task.s(1, 2) for _ in range(1)) | test_task.s(1) + assert str(c) == "t.unit.tasks.test_canvas.test_task(1, 2) | test_task(1)" + class test_chord(CanvasCase): def test__get_app_does_not_exhaust_generator(self): @@ -1769,12 +1782,20 @@ def test_chord__or__group_of_single_task(self): def test_chord_upgrade_on_chaining(self): """ Test that chaining a chord with a group body upgrades to a new chord """ - c = chord([signature('header')], group(signature('body'))) + c = chord([signature('header')], group(signature('body'), signature('body2'))) t = signature('t') stil_chord = c | t # t should be chained with the body of c and create a new chord assert isinstance(stil_chord, chord) assert isinstance(stil_chord.body, chord) + def test_no_chord_upgrade_on_chaining_with_group_of_a_single_item(self): + """ Test that chaining a chord with a group body upgrades to a new chord """ + c = chord([signature('header')], group(signature('body'))) + t = signature('t') + stil_chord = c | t # t should be chained with the body of c and create a new chord + assert isinstance(stil_chord, chord) + assert isinstance(stil_chord.body, _chain) + @pytest.mark.parametrize('header', [ [signature('s1'), signature('s2')], group(signature('s1'), signature('s2')) diff --git a/t/unit/tasks/test_tasks.py b/t/unit/tasks/test_tasks.py index 7462313c74f..b168fbefc9a 100644 --- a/t/unit/tasks/test_tasks.py +++ b/t/unit/tasks/test_tasks.py @@ -1165,7 +1165,7 @@ def test_replace_with_chord(self): self.mytask.replace(sig1) def test_replace_callback(self): - c = group([self.mytask.s()], app=self.app) + c = group([self.mytask.s(), self.mytask.s()], app=self.app) c.freeze = Mock(name='freeze') c.delay = Mock(name='delay') self.mytask.request.id = 'id' From 7315c436c194ed23f7620448f902924733b5fcef Mon Sep 17 00:00:00 2001 From: Laurent Tramoy <7586076+Lotram@users.noreply.github.com> Date: Wed, 25 Dec 2024 19:09:05 +0100 Subject: [PATCH 0926/1051] fix(django): catch the right error when trying to close db connection (#9392) * fix(django): catch the right error when trying to close db connection (#9310) * chore(django): improve coverage for django fixup test --- celery/fixups/django.py | 8 +++++--- t/unit/fixups/test_django.py | 11 ++++++++--- 2 files changed, 13 insertions(+), 6 deletions(-) diff --git a/celery/fixups/django.py b/celery/fixups/django.py index 5a8ca1b993a..b35499493a6 100644 --- a/celery/fixups/django.py +++ b/celery/fixups/django.py @@ -16,6 +16,7 @@ from types import ModuleType from 
typing import Protocol + from django.db.backends.base.base import BaseDatabaseWrapper from django.db.utils import ConnectionHandler from celery.app.base import Celery @@ -164,15 +165,16 @@ def on_worker_process_init(self, **kwargs: Any) -> None: # network IO that close() might cause. for c in self._db.connections.all(): if c and c.connection: - self._maybe_close_db_fd(c.connection) + self._maybe_close_db_fd(c) # use the _ version to avoid DB_REUSE preventing the conn.close() call self._close_database(force=True) self.close_cache() - def _maybe_close_db_fd(self, fd: IO) -> None: + def _maybe_close_db_fd(self, c: "BaseDatabaseWrapper") -> None: try: - _maybe_close_fd(fd) + with c.wrap_database_errors: + _maybe_close_fd(c.connection) except self.interface_errors: pass diff --git a/t/unit/fixups/test_django.py b/t/unit/fixups/test_django.py index 72b4d60d873..c09ba61642c 100644 --- a/t/unit/fixups/test_django.py +++ b/t/unit/fixups/test_django.py @@ -1,5 +1,5 @@ from contextlib import contextmanager -from unittest.mock import Mock, patch +from unittest.mock import MagicMock, Mock, patch import pytest @@ -156,6 +156,10 @@ def test_on_worker_init(self): assert f._worker_fixup is DWF.return_value +class InterfaceError(Exception): + pass + + class test_DjangoWorkerFixup(FixupCase): Fixup = DjangoWorkerFixup @@ -180,14 +184,15 @@ def test_install(self): def test_on_worker_process_init(self, patching): with self.fixup_context(self.app) as (f, _, _): - with patch('celery.fixups.django._maybe_close_fd') as mcf: + with patch('celery.fixups.django._maybe_close_fd', side_effect=InterfaceError) as mcf: _all = f._db.connections.all = Mock() conns = _all.return_value = [ - Mock(), Mock(), + Mock(), MagicMock(), ] conns[0].connection = None with patch.object(f, 'close_cache'): with patch.object(f, '_close_database'): + f.interface_errors = (InterfaceError, ) f.on_worker_process_init() mcf.assert_called_with(conns[1].connection) f.close_cache.assert_called_with() From 48aaadedfcde043fa973ff2176abbb5fec9691e5 Mon Sep 17 00:00:00 2001 From: Omer Katz Date: Wed, 8 Jan 2025 17:52:14 +0200 Subject: [PATCH 0927/1051] Replacing a task with a chain which contains a group now returns a result instead of hanging. 
(#9484) --- celery/app/task.py | 2 ++ t/integration/tasks.py | 5 +++++ t/integration/test_canvas.py | 13 ++++++++++--- 3 files changed, 17 insertions(+), 3 deletions(-) diff --git a/celery/app/task.py b/celery/app/task.py index 951c75824b7..2fdff06fd48 100644 --- a/celery/app/task.py +++ b/celery/app/task.py @@ -937,6 +937,8 @@ def replace(self, sig): if isinstance(sig, group): # Groups get uplifted to a chord so that we can link onto the body sig |= self.app.tasks['celery.accumulate'].s(index=0) + if isinstance(sig, _chain) and isinstance(sig.tasks[-1], group): + sig.tasks[-1] |= self.app.tasks['celery.accumulate'].s(index=0) for callback in maybe_list(self.request.callbacks) or []: sig.link(callback) for errback in maybe_list(self.request.errbacks) or []: diff --git a/t/integration/tasks.py b/t/integration/tasks.py index 031c89e002e..27338226559 100644 --- a/t/integration/tasks.py +++ b/t/integration/tasks.py @@ -153,6 +153,11 @@ def replace_with_empty_chain(self, *_): return self.replace(chain()) +@shared_task(bind=True) +def replace_with_chain_which_contains_a_group(self): + return self.replace(chain(add.s(1, 2), group(add.s(1), add.s(1)))) + + @shared_task(bind=True) def add_to_all(self, nums, val): """Add the given value to all supplied numbers.""" diff --git a/t/integration/test_canvas.py b/t/integration/test_canvas.py index d2474fa2351..77e584a03fe 100644 --- a/t/integration/test_canvas.py +++ b/t/integration/test_canvas.py @@ -21,9 +21,9 @@ add_to_all_to_chord, build_chain_inside_task, collect_ids, delayed_sum, delayed_sum_with_soft_guard, errback_new_style, errback_old_style, fail, fail_replaced, identity, ids, mul, print_unicode, raise_error, redis_count, redis_echo, redis_echo_group_id, - replace_with_chain, replace_with_chain_which_raises, replace_with_empty_chain, - replace_with_stamped_task, retry_once, return_exception, return_priority, second_order_replace1, - tsum, write_to_file_and_return_int, xsum) + replace_with_chain, replace_with_chain_which_contains_a_group, replace_with_chain_which_raises, + replace_with_empty_chain, replace_with_stamped_task, retry_once, return_exception, + return_priority, second_order_replace1, tsum, write_to_file_and_return_int, xsum) RETRYABLE_EXCEPTIONS = (OSError, ConnectionError, TimeoutError) @@ -310,6 +310,13 @@ def test_second_order_replace(self, manager): b'Out A'] assert redis_messages == expected_messages + @flaky + def test_replace_with_chain_that_contains_a_group(self, manager): + s = replace_with_chain_which_contains_a_group.s() + + result = s.delay() + assert result.get(timeout=TIMEOUT) == [4, 4] + @flaky def test_parent_ids(self, manager, num=10): assert_ping(manager) From fe761416f4d9269b780a13cc1131e2a16945937f Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 6 Jan 2025 16:55:26 +0000 Subject: [PATCH 0928/1051] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/mirrors-mypy: v1.14.0 → v1.14.1](https://github.com/pre-commit/mirrors-mypy/compare/v1.14.0...v1.14.1) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 15abbf127e9..e451333b2f3 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -39,7 +39,7 @@ repos: - id: isort - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.14.0 + rev: v1.14.1 hooks: - id: mypy pass_filenames: 
false From 0c402b0b6a1e0e608d02c3a16453e39d8b1d9ef2 Mon Sep 17 00:00:00 2001 From: Omer Katz Date: Wed, 22 Jan 2025 16:52:40 +0200 Subject: [PATCH 0929/1051] Replacing a task with a chain which contains a group now returns a result instead of hanging. (#9510) --- t/integration/test_canvas.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/t/integration/test_canvas.py b/t/integration/test_canvas.py index 77e584a03fe..7a19616a471 100644 --- a/t/integration/test_canvas.py +++ b/t/integration/test_canvas.py @@ -1696,23 +1696,23 @@ def test_replaced_nested_chord(self, manager): res1 = c1() assert res1.get(timeout=TIMEOUT) == [29, 38] - @flaky + # @flaky def test_add_to_chord(self, manager): if not manager.app.conf.result_backend.startswith('redis'): raise pytest.skip('Requires redis result backend.') - c = group([add_to_all_to_chord.s([1, 2, 3], 4)]) | identity.s() + c = group([identity.si(1), add_to_all_to_chord.s([1, 2, 3], 4)]) | identity.s() res = c() - assert sorted(res.get()) == [0, 5, 6, 7] + assert sorted(res.get()) == [0, 1, 5, 6, 7] @flaky def test_add_chord_to_chord(self, manager): if not manager.app.conf.result_backend.startswith('redis'): raise pytest.skip('Requires redis result backend.') - c = group([add_chord_to_chord.s([1, 2, 3], 4)]) | identity.s() + c = group([identity.si(1), add_chord_to_chord.s([1, 2, 3], 4)]) | identity.s() res = c() - assert sorted(res.get()) == [0, 5 + 6 + 7] + assert sorted(res.get()) == [0, 1, 5 + 6 + 7] @flaky def test_eager_chord_inside_task(self, manager): From 3fdb466d0e413362379074d1c4348d13321af203 Mon Sep 17 00:00:00 2001 From: Yigit Sever Date: Wed, 22 Jan 2025 15:53:40 +0100 Subject: [PATCH 0930/1051] Link to the correct IRC network (#9509) --- docs/includes/resources.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/includes/resources.txt b/docs/includes/resources.txt index 4bfbfd17926..91ef547e9d2 100644 --- a/docs/includes/resources.txt +++ b/docs/includes/resources.txt @@ -21,7 +21,7 @@ IRC Come chat with us on IRC. The **#celery** channel is located at the `Libera Chat`_ network. -.. _`Libera Chat`: https://freenode.net +.. _`Libera Chat`: https://libera.chat/ .. _bug-tracker: From dc6726eaaedf756a8441bb0257c437db3f461918 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 22 Jan 2025 16:53:57 +0200 Subject: [PATCH 0931/1051] Bump pytest-github-actions-annotate-failures from 0.2.0 to 0.3.0 (#9504) Bumps [pytest-github-actions-annotate-failures](https://github.com/pytest-dev/pytest-github-actions-annotate-failures) from 0.2.0 to 0.3.0. - [Release notes](https://github.com/pytest-dev/pytest-github-actions-annotate-failures/releases) - [Changelog](https://github.com/pytest-dev/pytest-github-actions-annotate-failures/blob/main/CHANGELOG.md) - [Commits](https://github.com/pytest-dev/pytest-github-actions-annotate-failures/compare/v0.2.0...v0.3.0) --- updated-dependencies: - dependency-name: pytest-github-actions-annotate-failures dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/test-ci-base.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test-ci-base.txt b/requirements/test-ci-base.txt index 05ee50df850..b5649723471 100644 --- a/requirements/test-ci-base.txt +++ b/requirements/test-ci-base.txt @@ -1,6 +1,6 @@ pytest-cov==5.0.0; python_version<"3.9" pytest-cov==6.0.0; python_version>="3.9" -pytest-github-actions-annotate-failures==0.2.0 +pytest-github-actions-annotate-failures==0.3.0 -r extras/redis.txt -r extras/sqlalchemy.txt -r extras/pymemcache.txt From d5ebfc9d5a2d856413df1e7d14739ec58fae4dd3 Mon Sep 17 00:00:00 2001 From: Kamal Farahani <17600026+kamalfarahani@users.noreply.github.com> Date: Wed, 22 Jan 2025 18:24:31 +0330 Subject: [PATCH 0932/1051] Update canvas.rst to fix output result from chain object (#9502) The output of the following code: ```python res = (add.s(4, 4) | group(add.si(i, i) for i in range(10))) res.get() ``` should be: ``` [0, 2, 4, 6, 8, 10, 12, 14, 16, 18] ``` but the documentation wrongly shows: ``` ``` --- docs/userguide/canvas.rst | 12 +----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/docs/userguide/canvas.rst b/docs/userguide/canvas.rst index c701bdc39f7..e5ae3062763 100644 --- a/docs/userguide/canvas.rst +++ b/docs/userguide/canvas.rst @@ -461,17 +461,7 @@ Here're some examples: >>> res = (add.s(4, 4) | group(add.si(i, i) for i in range(10)))() >>> res.get() - + [0, 2, 4, 6, 8, 10, 12, 14, 16, 18] >>> res.parent.get() 8 From 1fbfeca8ad53c63a8380b904dcc6d8b3c3d752c7 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 29 Jan 2025 15:05:49 +0200 Subject: [PATCH 0933/1051] Unauthorized Changes Cleanup (#9528) Reverting unauthorized code changes due to security incident #9525: https://github.com/celery/celery/discussions/9525 * Revert "Update canvas.rst to fix output result from chain object (#9502)" This reverts commit d5ebfc9d5a2d856413df1e7d14739ec58fae4dd3. * Revert "Bump pytest-github-actions-annotate-failures from 0.2.0 to 0.3.0 (#9504)" This reverts commit dc6726eaaedf756a8441bb0257c437db3f461918. * Revert "Link to the correct IRC network (#9509)" This reverts commit 3fdb466d0e413362379074d1c4348d13321af203. * Revert "Replacing a task with a chain which contains a group now returns a result instead of hanging. (#9510)" This reverts commit 0c402b0b6a1e0e608d02c3a16453e39d8b1d9ef2. * Revert "[pre-commit.ci] pre-commit autoupdate" This reverts commit fe761416f4d9269b780a13cc1131e2a16945937f. * Revert "Replacing a task with a chain which contains a group now returns a result instead of hanging. (#9484)" This reverts commit 48aaadedfcde043fa973ff2176abbb5fec9691e5. * Revert "fix(django): catch the right error when trying to close db connection (#9392)" This reverts commit 7315c436c194ed23f7620448f902924733b5fcef. * Revert "Unroll group when chaining a group with a single item to another signature when using the | operator. (#9456)" This reverts commit 3ae15c1dfc80c102aed05ad9c1d38470218b2fc8.
--- .pre-commit-config.yaml | 2 +- celery/app/task.py | 2 -- celery/canvas.py | 4 ---- celery/fixups/django.py | 8 +++----- docs/includes/resources.txt | 2 +- docs/userguide/canvas.rst | 34 +++++++++++----------------------- requirements/test-ci-base.txt | 2 +- t/integration/tasks.py | 5 ----- t/integration/test_canvas.py | 23 ++++++++--------------- t/unit/fixups/test_django.py | 11 +++-------- t/unit/tasks/test_canvas.py | 25 ++----------------------- t/unit/tasks/test_tasks.py | 2 +- 12 files changed, 31 insertions(+), 89 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index e451333b2f3..15abbf127e9 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -39,7 +39,7 @@ repos: - id: isort - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.14.1 + rev: v1.14.0 hooks: - id: mypy pass_filenames: false diff --git a/celery/app/task.py b/celery/app/task.py index 2fdff06fd48..951c75824b7 100644 --- a/celery/app/task.py +++ b/celery/app/task.py @@ -937,8 +937,6 @@ def replace(self, sig): if isinstance(sig, group): # Groups get uplifted to a chord so that we can link onto the body sig |= self.app.tasks['celery.accumulate'].s(index=0) - if isinstance(sig, _chain) and isinstance(sig.tasks[-1], group): - sig.tasks[-1] |= self.app.tasks['celery.accumulate'].s(index=0) for callback in maybe_list(self.request.callbacks) or []: sig.link(callback) for errback in maybe_list(self.request.errbacks) or []: diff --git a/celery/canvas.py b/celery/canvas.py index 748445f7a27..9f4d2f0ce74 100644 --- a/celery/canvas.py +++ b/celery/canvas.py @@ -1583,10 +1583,6 @@ def __call__(self, *partial_args, **options): def __or__(self, other): # group() | task -> chord - # If the group is unrolled, return a chain instead - g = maybe_unroll_group(self) - if not isinstance(g, group): - return g | other return chord(self, body=other, app=self._app) def skew(self, start=1.0, stop=None, step=1.0): diff --git a/celery/fixups/django.py b/celery/fixups/django.py index b35499493a6..5a8ca1b993a 100644 --- a/celery/fixups/django.py +++ b/celery/fixups/django.py @@ -16,7 +16,6 @@ from types import ModuleType from typing import Protocol - from django.db.backends.base.base import BaseDatabaseWrapper from django.db.utils import ConnectionHandler from celery.app.base import Celery @@ -165,16 +164,15 @@ def on_worker_process_init(self, **kwargs: Any) -> None: # network IO that close() might cause. for c in self._db.connections.all(): if c and c.connection: - self._maybe_close_db_fd(c) + self._maybe_close_db_fd(c.connection) # use the _ version to avoid DB_REUSE preventing the conn.close() call self._close_database(force=True) self.close_cache() - def _maybe_close_db_fd(self, c: "BaseDatabaseWrapper") -> None: + def _maybe_close_db_fd(self, fd: IO) -> None: try: - with c.wrap_database_errors: - _maybe_close_fd(c.connection) + _maybe_close_fd(fd) except self.interface_errors: pass diff --git a/docs/includes/resources.txt b/docs/includes/resources.txt index 91ef547e9d2..4bfbfd17926 100644 --- a/docs/includes/resources.txt +++ b/docs/includes/resources.txt @@ -21,7 +21,7 @@ IRC Come chat with us on IRC. The **#celery** channel is located at the `Libera Chat`_ network. -.. _`Libera Chat`: https://libera.chat/ +.. _`Libera Chat`: https://freenode.net .. 
_bug-tracker: diff --git a/docs/userguide/canvas.rst b/docs/userguide/canvas.rst index e5ae3062763..8b74e38b955 100644 --- a/docs/userguide/canvas.rst +++ b/docs/userguide/canvas.rst @@ -461,7 +461,17 @@ Here're some examples: >>> res = (add.s(4, 4) | group(add.si(i, i) for i in range(10)))() >>> res.get() - [0, 2, 4, 6, 8, 10, 12, 14, 16, 18] + >>> res.parent.get() 8 @@ -849,28 +859,6 @@ if you plan to use it as part of a larger canvas. >>> chain(group(add.s(1, 1)), add.s(2)) add(1, 1) | add(2) -.. warning:: - - .. versionadded:: 5.5 - - Before Celery 5.5 the following group would be upgraded to a chord instead of being unrolled: - - .. code-block:: pycon - - >>> from celery import chain, group - >>> from tasks import add - >>> group(add.s(1, 1)) | add.s(2) - %add([add(1, 1)], 2) - - This was fixed in Celery 5.5 and now the group is correctly unrolled into a single signature. - - .. code-block:: pycon - - >>> from celery import chain, group - >>> from tasks import add - >>> group(add.s(1, 1)) | add.s(2) - add(1, 1) | add(2) - .. _canvas-chord: Chords diff --git a/requirements/test-ci-base.txt b/requirements/test-ci-base.txt index b5649723471..05ee50df850 100644 --- a/requirements/test-ci-base.txt +++ b/requirements/test-ci-base.txt @@ -1,6 +1,6 @@ pytest-cov==5.0.0; python_version<"3.9" pytest-cov==6.0.0; python_version>="3.9" -pytest-github-actions-annotate-failures==0.3.0 +pytest-github-actions-annotate-failures==0.2.0 -r extras/redis.txt -r extras/sqlalchemy.txt -r extras/pymemcache.txt diff --git a/t/integration/tasks.py b/t/integration/tasks.py index 27338226559..031c89e002e 100644 --- a/t/integration/tasks.py +++ b/t/integration/tasks.py @@ -153,11 +153,6 @@ def replace_with_empty_chain(self, *_): return self.replace(chain()) -@shared_task(bind=True) -def replace_with_chain_which_contains_a_group(self): - return self.replace(chain(add.s(1, 2), group(add.s(1), add.s(1)))) - - @shared_task(bind=True) def add_to_all(self, nums, val): """Add the given value to all supplied numbers.""" diff --git a/t/integration/test_canvas.py b/t/integration/test_canvas.py index 7a19616a471..d2474fa2351 100644 --- a/t/integration/test_canvas.py +++ b/t/integration/test_canvas.py @@ -21,9 +21,9 @@ add_to_all_to_chord, build_chain_inside_task, collect_ids, delayed_sum, delayed_sum_with_soft_guard, errback_new_style, errback_old_style, fail, fail_replaced, identity, ids, mul, print_unicode, raise_error, redis_count, redis_echo, redis_echo_group_id, - replace_with_chain, replace_with_chain_which_contains_a_group, replace_with_chain_which_raises, - replace_with_empty_chain, replace_with_stamped_task, retry_once, return_exception, - return_priority, second_order_replace1, tsum, write_to_file_and_return_int, xsum) + replace_with_chain, replace_with_chain_which_raises, replace_with_empty_chain, + replace_with_stamped_task, retry_once, return_exception, return_priority, second_order_replace1, + tsum, write_to_file_and_return_int, xsum) RETRYABLE_EXCEPTIONS = (OSError, ConnectionError, TimeoutError) @@ -310,13 +310,6 @@ def test_second_order_replace(self, manager): b'Out A'] assert redis_messages == expected_messages - @flaky - def test_replace_with_chain_that_contains_a_group(self, manager): - s = replace_with_chain_which_contains_a_group.s() - - result = s.delay() - assert result.get(timeout=TIMEOUT) == [4, 4] - @flaky def test_parent_ids(self, manager, num=10): assert_ping(manager) @@ -1696,23 +1689,23 @@ def test_replaced_nested_chord(self, manager): res1 = c1() assert res1.get(timeout=TIMEOUT) == [29, 38] - 
# @flaky + @flaky def test_add_to_chord(self, manager): if not manager.app.conf.result_backend.startswith('redis'): raise pytest.skip('Requires redis result backend.') - c = group([identity.si(1), add_to_all_to_chord.s([1, 2, 3], 4)]) | identity.s() + c = group([add_to_all_to_chord.s([1, 2, 3], 4)]) | identity.s() res = c() - assert sorted(res.get()) == [0, 1, 5, 6, 7] + assert sorted(res.get()) == [0, 5, 6, 7] @flaky def test_add_chord_to_chord(self, manager): if not manager.app.conf.result_backend.startswith('redis'): raise pytest.skip('Requires redis result backend.') - c = group([identity.si(1), add_chord_to_chord.s([1, 2, 3], 4)]) | identity.s() + c = group([add_chord_to_chord.s([1, 2, 3], 4)]) | identity.s() res = c() - assert sorted(res.get()) == [0, 1, 5 + 6 + 7] + assert sorted(res.get()) == [0, 5 + 6 + 7] @flaky def test_eager_chord_inside_task(self, manager): diff --git a/t/unit/fixups/test_django.py b/t/unit/fixups/test_django.py index c09ba61642c..72b4d60d873 100644 --- a/t/unit/fixups/test_django.py +++ b/t/unit/fixups/test_django.py @@ -1,5 +1,5 @@ from contextlib import contextmanager -from unittest.mock import MagicMock, Mock, patch +from unittest.mock import Mock, patch import pytest @@ -156,10 +156,6 @@ def test_on_worker_init(self): assert f._worker_fixup is DWF.return_value -class InterfaceError(Exception): - pass - - class test_DjangoWorkerFixup(FixupCase): Fixup = DjangoWorkerFixup @@ -184,15 +180,14 @@ def test_install(self): def test_on_worker_process_init(self, patching): with self.fixup_context(self.app) as (f, _, _): - with patch('celery.fixups.django._maybe_close_fd', side_effect=InterfaceError) as mcf: + with patch('celery.fixups.django._maybe_close_fd') as mcf: _all = f._db.connections.all = Mock() conns = _all.return_value = [ - Mock(), MagicMock(), + Mock(), Mock(), ] conns[0].connection = None with patch.object(f, 'close_cache'): with patch.object(f, '_close_database'): - f.interface_errors = (InterfaceError, ) f.on_worker_process_init() mcf.assert_called_with(conns[1].connection) f.close_cache.assert_called_with() diff --git a/t/unit/tasks/test_canvas.py b/t/unit/tasks/test_canvas.py index 224f8ca7465..1f901376205 100644 --- a/t/unit/tasks/test_canvas.py +++ b/t/unit/tasks/test_canvas.py @@ -564,7 +564,7 @@ def test_chain_of_chord__or__group_of_single_task(self): assert isinstance(new_chain.tasks[0].body, _chain) def test_chain_of_chord_upgrade_on_chaining(self): - c = chord([signature('header')], group(signature('body'), signature('body2'))) + c = chord([signature('header')], group(signature('body'))) c = chain(c) t = signature('t') new_chain = c | t # t should be chained with the body of c[0] and create a new chord @@ -1251,19 +1251,6 @@ def test_group_prepared(self): assert isinstance(result, AsyncResult) assert group_id is not None - def test_group_unroll(self, subtests): - @self.app.task - def test_task(a, b): - return - - with subtests.test("single item"): - c = group(test_task.s(1, 2)) | test_task.s(1) - assert str(c) == "t.unit.tasks.test_canvas.test_task(1, 2) | test_task(1)" - - with subtests.test("regen"): - c = group(test_task.s(1, 2) for _ in range(1)) | test_task.s(1) - assert str(c) == "t.unit.tasks.test_canvas.test_task(1, 2) | test_task(1)" - class test_chord(CanvasCase): def test__get_app_does_not_exhaust_generator(self): @@ -1781,20 +1768,12 @@ def test_chord__or__group_of_single_task(self): assert isinstance(stil_chord.body, _chain) def test_chord_upgrade_on_chaining(self): - """ Test that chaining a chord with a group body upgrades to 
a new chord """ - c = chord([signature('header')], group(signature('body'), signature('body2'))) - t = signature('t') - stil_chord = c | t # t should be chained with the body of c and create a new chord - assert isinstance(stil_chord, chord) - assert isinstance(stil_chord.body, chord) - - def test_no_chord_upgrade_on_chaining_with_group_of_a_single_item(self): """ Test that chaining a chord with a group body upgrades to a new chord """ c = chord([signature('header')], group(signature('body'))) t = signature('t') stil_chord = c | t # t should be chained with the body of c and create a new chord assert isinstance(stil_chord, chord) - assert isinstance(stil_chord.body, _chain) + assert isinstance(stil_chord.body, chord) @pytest.mark.parametrize('header', [ [signature('s1'), signature('s2')], diff --git a/t/unit/tasks/test_tasks.py b/t/unit/tasks/test_tasks.py index b168fbefc9a..7462313c74f 100644 --- a/t/unit/tasks/test_tasks.py +++ b/t/unit/tasks/test_tasks.py @@ -1165,7 +1165,7 @@ def test_replace_with_chord(self): self.mytask.replace(sig1) def test_replace_callback(self): - c = group([self.mytask.s(), self.mytask.s()], app=self.app) + c = group([self.mytask.s()], app=self.app) c.freeze = Mock(name='freeze') c.delay = Mock(name='delay') self.mytask.request.id = 'id' From 78d847666b114acaa23a5b918b2f352e6d22b58a Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 29 Jan 2025 16:35:47 +0200 Subject: [PATCH 0934/1051] fix(django): catch the right error when trying to close db connection (#9392) (#9529) * fix(django): catch the right error when trying to close db connection (#9310) * chore(django): improve coverage for django fixup test Co-authored-by: Laurent Tramoy <7586076+Lotram@users.noreply.github.com> --- celery/fixups/django.py | 8 +++++--- t/unit/fixups/test_django.py | 11 ++++++++--- 2 files changed, 13 insertions(+), 6 deletions(-) diff --git a/celery/fixups/django.py b/celery/fixups/django.py index 5a8ca1b993a..b35499493a6 100644 --- a/celery/fixups/django.py +++ b/celery/fixups/django.py @@ -16,6 +16,7 @@ from types import ModuleType from typing import Protocol + from django.db.backends.base.base import BaseDatabaseWrapper from django.db.utils import ConnectionHandler from celery.app.base import Celery @@ -164,15 +165,16 @@ def on_worker_process_init(self, **kwargs: Any) -> None: # network IO that close() might cause. 
for c in self._db.connections.all(): if c and c.connection: - self._maybe_close_db_fd(c.connection) + self._maybe_close_db_fd(c) # use the _ version to avoid DB_REUSE preventing the conn.close() call self._close_database(force=True) self.close_cache() - def _maybe_close_db_fd(self, fd: IO) -> None: + def _maybe_close_db_fd(self, c: "BaseDatabaseWrapper") -> None: try: - _maybe_close_fd(fd) + with c.wrap_database_errors: + _maybe_close_fd(c.connection) except self.interface_errors: pass diff --git a/t/unit/fixups/test_django.py b/t/unit/fixups/test_django.py index 72b4d60d873..c09ba61642c 100644 --- a/t/unit/fixups/test_django.py +++ b/t/unit/fixups/test_django.py @@ -1,5 +1,5 @@ from contextlib import contextmanager -from unittest.mock import Mock, patch +from unittest.mock import MagicMock, Mock, patch import pytest @@ -156,6 +156,10 @@ def test_on_worker_init(self): assert f._worker_fixup is DWF.return_value +class InterfaceError(Exception): + pass + + class test_DjangoWorkerFixup(FixupCase): Fixup = DjangoWorkerFixup @@ -180,14 +184,15 @@ def test_install(self): def test_on_worker_process_init(self, patching): with self.fixup_context(self.app) as (f, _, _): - with patch('celery.fixups.django._maybe_close_fd') as mcf: + with patch('celery.fixups.django._maybe_close_fd', side_effect=InterfaceError) as mcf: _all = f._db.connections.all = Mock() conns = _all.return_value = [ - Mock(), Mock(), + Mock(), MagicMock(), ] conns[0].connection = None with patch.object(f, 'close_cache'): with patch.object(f, '_close_database'): + f.interface_errors = (InterfaceError, ) f.on_worker_process_init() mcf.assert_called_with(conns[1].connection) f.close_cache.assert_called_with() From 73efb671e4250e41b14f76d7d51f487c445ec578 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 29 Jan 2025 16:35:59 +0200 Subject: [PATCH 0935/1051] Link to the correct IRC network (#9509) (#9531) Co-authored-by: Yigit Sever --- docs/includes/resources.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/includes/resources.txt b/docs/includes/resources.txt index 4bfbfd17926..91ef547e9d2 100644 --- a/docs/includes/resources.txt +++ b/docs/includes/resources.txt @@ -21,7 +21,7 @@ IRC Come chat with us on IRC. The **#celery** channel is located at the `Libera Chat`_ network. -.. _`Libera Chat`: https://freenode.net +.. _`Libera Chat`: https://libera.chat/ .. 
_bug-tracker: From 84737316a3609ef9978b12e84a01e07d664cfcb5 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Wed, 29 Jan 2025 16:37:39 +0200 Subject: [PATCH 0936/1051] Update canvas.rst to fix output result from chain object (#9502) (#9532) The output of the following code: ```python res = (add.s(4, 4) | group(add.si(i, i) for i in range(10))) res.get() ``` should be: ``` [0, 2, 4, 6, 8, 10, 12, 14, 16, 18] ``` but the documentation wrongly shows: ``` ``` Co-authored-by: Kamal Farahani <17600026+kamalfarahani@users.noreply.github.com> --- docs/userguide/canvas.rst | 12 +----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/docs/userguide/canvas.rst b/docs/userguide/canvas.rst index 8b74e38b955..3268e93367a 100644 --- a/docs/userguide/canvas.rst +++ b/docs/userguide/canvas.rst @@ -461,17 +461,7 @@ Here're some examples: >>> res = (add.s(4, 4) | group(add.si(i, i) for i in range(10)))() >>> res.get() - + [0, 2, 4, 6, 8, 10, 12, 14, 16, 18] >>> res.parent.get() 8 From ca451a7d3016705512058e9aaef758cfb87e8008 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Sun, 2 Feb 2025 12:37:37 +0600 Subject: [PATCH 0937/1051] Update test-ci-base.txt (#9539) --- requirements/test-ci-base.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test-ci-base.txt b/requirements/test-ci-base.txt index 05ee50df850..b5649723471 100644 --- a/requirements/test-ci-base.txt +++ b/requirements/test-ci-base.txt @@ -1,6 +1,6 @@ pytest-cov==5.0.0; python_version<"3.9" pytest-cov==6.0.0; python_version>="3.9" -pytest-github-actions-annotate-failures==0.2.0 +pytest-github-actions-annotate-failures==0.3.0 -r extras/redis.txt -r extras/sqlalchemy.txt -r extras/pymemcache.txt From 939cfe57456c7d1f2e08c846d905ecf226f97924 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Sun, 2 Feb 2025 12:43:03 +0600 Subject: [PATCH 0938/1051] Update install-pyenv.sh (#9540) Seems this part was not aligned with other updates. --- docker/scripts/install-pyenv.sh | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/docker/scripts/install-pyenv.sh b/docker/scripts/install-pyenv.sh index ed63664fbdc..adfb3a96e11 100644 --- a/docker/scripts/install-pyenv.sh +++ b/docker/scripts/install-pyenv.sh @@ -7,8 +7,9 @@ curl -L https://github.com/pyenv/pyenv-installer/raw/master/bin/pyenv-installer git clone https://github.com/s1341/pyenv-alias.git $(pyenv root)/plugins/pyenv-alias # Python versions to test against -VERSION_ALIAS="python3.12" pyenv install 3.12.0 -VERSION_ALIAS="python3.11" pyenv install 3.11.6 -VERSION_ALIAS="python3.10" pyenv install 3.10.13 -VERSION_ALIAS="python3.9" pyenv install 3.9.18 -VERSION_ALIAS="python3.8" pyenv install 3.8.18 +VERSION_ALIAS="python3.13" pyenv install 3.13.1 +VERSION_ALIAS="python3.12" pyenv install 3.12.8 +VERSION_ALIAS="python3.11" pyenv install 3.11.11 +VERSION_ALIAS="python3.10" pyenv install 3.10.16 +VERSION_ALIAS="python3.9" pyenv install 3.9.21 +VERSION_ALIAS="python3.8" pyenv install 3.8.20 From a9402a7f4e3e283c4a768ebdeba0f9eaa3a31990 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Sun, 2 Feb 2025 13:41:27 +0600 Subject: [PATCH 0939/1051] [pre-commit.ci] pre-commit autoupdate (#9524) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * [pre-commit.ci] pre-commit autoupdate updates: - [github.com/codespell-project/codespell: v2.3.0 → v2.4.0](https://github.com/codespell-project/codespell/compare/v2.3.0...v2.4.0) * [pre-commit.ci]
auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Asif Saif Uddin --- .pre-commit-config.yaml | 2 +- docs/userguide/concurrency/gevent.rst | 2 +- t/unit/utils/test_functional.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 15abbf127e9..c233a488509 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -17,7 +17,7 @@ repos: exclude: ^celery/app/task\.py$|^celery/backends/cache\.py$ - repo: https://github.com/codespell-project/codespell - rev: v2.3.0 + rev: v2.4.0 hooks: - id: codespell # See pyproject.toml for args args: [--toml, pyproject.toml, --write-changes] diff --git a/docs/userguide/concurrency/gevent.rst b/docs/userguide/concurrency/gevent.rst index 7ec8eca414e..1bafd9ceb52 100644 --- a/docs/userguide/concurrency/gevent.rst +++ b/docs/userguide/concurrency/gevent.rst @@ -17,7 +17,7 @@ Features include: * Fast event loop based on `libev`_ or `libuv`_. * Lightweight execution units based on greenlets. -* API that re-uses concepts from the Python standard library (for +* API that reuses concepts from the Python standard library (for examples there are `events`_ and `queues`_). * `Cooperative sockets with SSL support `_ diff --git a/t/unit/utils/test_functional.py b/t/unit/utils/test_functional.py index 52fdce6a96a..a8c9dc1e893 100644 --- a/t/unit/utils/test_functional.py +++ b/t/unit/utils/test_functional.py @@ -195,7 +195,7 @@ def __iter__(self): # The following checks are for the known "misbehaviour" assert getattr(g, "_regen__done") is False # If the `regen()` instance doesn't think it's done then it'll dupe the - # elements from the underlying iterator if it can be re-used + # elements from the underlying iterator if it can be reused iter_g = iter(g) for e in original_list * 2: assert next(iter_g) == e From f5b6e983232c940d0a698f5c417e066dc54597fc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 2 Feb 2025 13:42:15 +0600 Subject: [PATCH 0940/1051] Update elasticsearch requirement from <=8.17.0 to <=8.17.1 (#9518) Updates the requirements on [elasticsearch](https://github.com/elastic/elasticsearch-py) to permit the latest version. - [Release notes](https://github.com/elastic/elasticsearch-py/releases) - [Commits](https://github.com/elastic/elasticsearch-py/compare/0.4.1...v8.17.1) --- updated-dependencies: - dependency-name: elasticsearch dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Asif Saif Uddin --- requirements/extras/elasticsearch.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/elasticsearch.txt b/requirements/extras/elasticsearch.txt index a729c4ae794..4f4d0292955 100644 --- a/requirements/extras/elasticsearch.txt +++ b/requirements/extras/elasticsearch.txt @@ -1,2 +1,2 @@ -elasticsearch<=8.17.0 +elasticsearch<=8.17.1 elastic-transport<=8.15.1 From d017888447c105753274c5c2712c684dc48667ff Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 2 Feb 2025 13:50:04 +0600 Subject: [PATCH 0941/1051] Bump google-cloud-firestore from 2.19.0 to 2.20.0 (#9493) Bumps [google-cloud-firestore](https://github.com/googleapis/python-firestore) from 2.19.0 to 2.20.0. 
- [Release notes](https://github.com/googleapis/python-firestore/releases) - [Changelog](https://github.com/googleapis/python-firestore/blob/main/CHANGELOG.md) - [Commits](https://github.com/googleapis/python-firestore/compare/v2.19.0...v2.20.0) --- updated-dependencies: - dependency-name: google-cloud-firestore dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Asif Saif Uddin --- requirements/extras/gcs.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/gcs.txt b/requirements/extras/gcs.txt index 363a19b8c8b..28ba9ac9ae9 100644 --- a/requirements/extras/gcs.txt +++ b/requirements/extras/gcs.txt @@ -1,3 +1,3 @@ google-cloud-storage>=2.10.0 -google-cloud-firestore==2.19.0 +google-cloud-firestore==2.20.0 grpcio==1.67.0 From b8eb8485110b905c9b53982c7a2f7df11a0f0d9e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 2 Feb 2025 13:55:18 +0600 Subject: [PATCH 0942/1051] Bump mypy from 1.14.0 to 1.14.1 (#9483) Bumps [mypy](https://github.com/python/mypy) from 1.14.0 to 1.14.1. - [Changelog](https://github.com/python/mypy/blob/master/CHANGELOG.md) - [Commits](https://github.com/python/mypy/compare/v1.14.0...v1.14.1) --- updated-dependencies: - dependency-name: mypy dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Asif Saif Uddin --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index 4a8adf99fab..f115c70ba78 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -10,7 +10,7 @@ pytest-order==1.3.0 boto3>=1.26.143 moto>=4.1.11,<5.1.0 # typing extensions -mypy==1.14.0; platform_python_implementation=="CPython" +mypy==1.14.1; platform_python_implementation=="CPython" pre-commit>=3.5.0,<3.8.0; python_version < '3.9' pre-commit>=4.0.1; python_version >= '3.9' -r extras/yaml.txt From 57ab2a651ea7cd4089532ba9478ecd65e8f159d5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 2 Feb 2025 14:57:40 +0600 Subject: [PATCH 0943/1051] Update elastic-transport requirement from <=8.15.1 to <=8.17.0 (#9490) Updates the requirements on [elastic-transport](https://github.com/elastic/elastic-transport-python) to permit the latest version. - [Release notes](https://github.com/elastic/elastic-transport-python/releases) - [Changelog](https://github.com/elastic/elastic-transport-python/blob/v8.17.0/CHANGELOG.md) - [Commits](https://github.com/elastic/elastic-transport-python/compare/0.1.0b0...v8.17.0) --- updated-dependencies: - dependency-name: elastic-transport dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Asif Saif Uddin --- requirements/extras/elasticsearch.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/elasticsearch.txt b/requirements/extras/elasticsearch.txt index 4f4d0292955..80d47852d1e 100644 --- a/requirements/extras/elasticsearch.txt +++ b/requirements/extras/elasticsearch.txt @@ -1,2 +1,2 @@ elasticsearch<=8.17.1 -elastic-transport<=8.15.1 +elastic-transport<=8.17.0 From d2052c6436cca85bb619d33ba40610101ad4de43 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 3 Feb 2025 16:55:27 +0000 Subject: [PATCH 0944/1051] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/codespell-project/codespell: v2.4.0 → v2.4.1](https://github.com/codespell-project/codespell/compare/v2.4.0...v2.4.1) - [github.com/pycqa/isort: 5.13.2 → 6.0.0](https://github.com/pycqa/isort/compare/5.13.2...6.0.0) - [github.com/pre-commit/mirrors-mypy: v1.14.0 → v1.14.1](https://github.com/pre-commit/mirrors-mypy/compare/v1.14.0...v1.14.1) --- .pre-commit-config.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index c233a488509..9f740ce952c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -17,7 +17,7 @@ repos: exclude: ^celery/app/task\.py$|^celery/backends/cache\.py$ - repo: https://github.com/codespell-project/codespell - rev: v2.4.0 + rev: v2.4.1 hooks: - id: codespell # See pyproject.toml for args args: [--toml, pyproject.toml, --write-changes] @@ -34,12 +34,12 @@ repos: - id: mixed-line-ending - repo: https://github.com/pycqa/isort - rev: 5.13.2 + rev: 6.0.0 hooks: - id: isort - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.14.0 + rev: v1.14.1 hooks: - id: mypy pass_filenames: false From 11eb4d36accc674fed3c87975ba63543abf9d89e Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 4 Feb 2025 13:56:08 +0200 Subject: [PATCH 0945/1051] Revert "[pre-commit.ci] pre-commit autoupdate" (#9545) This reverts commit d2052c6436cca85bb619d33ba40610101ad4de43. 
--- .pre-commit-config.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 9f740ce952c..c233a488509 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -17,7 +17,7 @@ repos: exclude: ^celery/app/task\.py$|^celery/backends/cache\.py$ - repo: https://github.com/codespell-project/codespell - rev: v2.4.1 + rev: v2.4.0 hooks: - id: codespell # See pyproject.toml for args args: [--toml, pyproject.toml, --write-changes] @@ -34,12 +34,12 @@ repos: - id: mixed-line-ending - repo: https://github.com/pycqa/isort - rev: 6.0.0 + rev: 5.13.2 hooks: - id: isort - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.14.1 + rev: v1.14.0 hooks: - id: mypy pass_filenames: false From b1b886d1dde479feaf54874a660ddec4a97e2442 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Wed, 5 Feb 2025 13:17:32 +0600 Subject: [PATCH 0946/1051] Update Dockerfile by adding missing Python version 3.13 --- docker/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/Dockerfile b/docker/Dockerfile index 3cc2a3aff38..479613ac51f 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -75,7 +75,7 @@ RUN pyenv install pypy3.10 # Set global Python versions -RUN pyenv global 3.12 3.11 3.10 3.9 3.8 pypy3.10 +RUN pyenv global 3.13 3.12 3.11 3.10 3.9 3.8 pypy3.10 # Install celery WORKDIR $HOME From cc9e96de90a434a4901aaad9e3f9769339f3a3e4 Mon Sep 17 00:00:00 2001 From: Dave Johansen Date: Tue, 14 Jan 2025 19:42:05 -0700 Subject: [PATCH 0947/1051] Fix typo for default of sig --- celery/apps/worker.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/celery/apps/worker.py b/celery/apps/worker.py index 435d333eebb..8669e7d621e 100644 --- a/celery/apps/worker.py +++ b/celery/apps/worker.py @@ -279,7 +279,7 @@ def set_process_status(self, info): ) -def _shutdown_handler(worker: Worker, sig='TERM', how='Warm', callback=None, exitcode=EX_OK, verbose=True): +def _shutdown_handler(worker: Worker, sig='SIGTERM', how='Warm', callback=None, exitcode=EX_OK, verbose=True): """Install signal handler for warm/cold shutdown. The handler will run from the MainProcess. From 32574874d61cb6dc101c68c59be5a67d5bdac23d Mon Sep 17 00:00:00 2001 From: Phil Crockett Date: Tue, 11 Feb 2025 09:17:14 +0100 Subject: [PATCH 0948/1051] fix(crontab): resolve constructor type conflicts (#9551) * fix(crontab): resolve constructor type conflicts * fix __reduce__ return type * make python 3.8 and 3.9 happy with Cronspec type * fix iterable subscript error in python 3.8 --------- Co-authored-by: Asif Saif Uddin --- celery/schedules.py | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/celery/schedules.py b/celery/schedules.py index 9cd051004e7..010b3396fa8 100644 --- a/celery/schedules.py +++ b/celery/schedules.py @@ -4,9 +4,8 @@ import re from bisect import bisect, bisect_left from collections import namedtuple -from collections.abc import Iterable from datetime import datetime, timedelta, tzinfo -from typing import Any, Callable, Mapping, Sequence +from typing import Any, Callable, Iterable, Mapping, Sequence, Union from kombu.utils.objects import cached_property @@ -52,7 +51,10 @@ """ -def cronfield(s: str) -> str: +Cronspec = Union[int, str, Iterable[int]] + + +def cronfield(s: Cronspec | None) -> Cronspec: return '*' if s is None else s @@ -396,8 +398,8 @@ class crontab(BaseSchedule): present in ``month_of_year``. 
""" - def __init__(self, minute: str = '*', hour: str = '*', day_of_week: str = '*', - day_of_month: str = '*', month_of_year: str = '*', **kwargs: Any) -> None: + def __init__(self, minute: Cronspec = '*', hour: Cronspec = '*', day_of_week: Cronspec = '*', + day_of_month: Cronspec = '*', month_of_year: Cronspec = '*', **kwargs: Any) -> None: self._orig_minute = cronfield(minute) self._orig_hour = cronfield(hour) self._orig_day_of_week = cronfield(day_of_week) @@ -430,7 +432,7 @@ def from_string(cls, crontab: str) -> crontab: @staticmethod def _expand_cronspec( - cronspec: int | str | Iterable, + cronspec: Cronspec, max_: int, min_: int = 0) -> set[Any]: """Expand cron specification. @@ -555,7 +557,7 @@ def roll_over() -> None: def __repr__(self) -> str: return CRON_REPR.format(self) - def __reduce__(self) -> tuple[type, tuple[str, str, str, str, str], Any]: + def __reduce__(self) -> tuple[type, tuple[Cronspec, Cronspec, Cronspec, Cronspec, Cronspec], Any]: return (self.__class__, (self._orig_minute, self._orig_hour, self._orig_day_of_week, From ad5c74fbdb7e94aaa47b069278d62f85c932139f Mon Sep 17 00:00:00 2001 From: mksm Date: Sun, 9 Feb 2025 17:31:06 -0300 Subject: [PATCH 0949/1051] worker_max_memory_per_child: kilobyte is 1024 bytes --- docs/userguide/configuration.rst | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst index 01b276458ec..56521e0400c 100644 --- a/docs/userguide/configuration.rst +++ b/docs/userguide/configuration.rst @@ -3207,16 +3207,16 @@ it's replaced with a new one. Default is no limit. Default: No limit. Type: int (kilobytes) -Maximum amount of resident memory, in kilobytes, that may be consumed by a -worker before it will be replaced by a new worker. If a single -task causes a worker to exceed this limit, the task will be -completed, and the worker will be replaced afterwards. +Maximum amount of resident memory, in kilobytes (1024 bytes), that may be +consumed by a worker before it will be replaced by a new worker. If a single +task causes a worker to exceed this limit, the task will be completed, and the +worker will be replaced afterwards. Example: .. code-block:: python - worker_max_memory_per_child = 12000 # 12MB + worker_max_memory_per_child = 12288 # 12 * 1024 = 12 MB .. setting:: worker_disable_rate_limits From 4fbd1f9541e03d4444b59f990f6e76575987d17e Mon Sep 17 00:00:00 2001 From: Henrik Ossipoff Hansen Date: Tue, 11 Feb 2025 20:51:09 +0100 Subject: [PATCH 0950/1051] Fix formatting in quorum queue docs (#9555) --- docs/getting-started/backends-and-brokers/rabbitmq.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/getting-started/backends-and-brokers/rabbitmq.rst b/docs/getting-started/backends-and-brokers/rabbitmq.rst index 5a324ecdc35..4dae16877e3 100644 --- a/docs/getting-started/backends-and-brokers/rabbitmq.rst +++ b/docs/getting-started/backends-and-brokers/rabbitmq.rst @@ -185,7 +185,7 @@ Using Quorum Queues Quorum Queues require disabling global QoS which means some features won't work as expected. See `limitations`_ for details. -Celery supports `Quorum Queues`_ by setting the ``x-queue-type`` header to ``quorum` like so: +Celery supports `Quorum Queues`_ by setting the ``x-queue-type`` header to ``quorum`` like so: .. 
code-block:: python From 2e7a205f92b66983ffaf98fbb104c6645c0a3a8e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 12 Feb 2025 03:28:16 +0200 Subject: [PATCH 0951/1051] Bump cryptography from 44.0.0 to 44.0.1 (#9556) Bumps [cryptography](https://github.com/pyca/cryptography) from 44.0.0 to 44.0.1. - [Changelog](https://github.com/pyca/cryptography/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pyca/cryptography/compare/44.0.0...44.0.1) --- updated-dependencies: - dependency-name: cryptography dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/extras/auth.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/auth.txt b/requirements/extras/auth.txt index afd08f7b18e..241eda13e6a 100644 --- a/requirements/extras/auth.txt +++ b/requirements/extras/auth.txt @@ -1 +1 @@ -cryptography==44.0.0 +cryptography==44.0.1 From dbf2de8d13d0469b5df36d517579bc51f0f88826 Mon Sep 17 00:00:00 2001 From: mike lazko Date: Wed, 12 Feb 2025 12:38:17 +0700 Subject: [PATCH 0952/1051] Fix send_task method when detecting whether the native delayed delivery approach is available (#9552) Co-authored-by: Asif Saif Uddin Co-authored-by: Tomer Nosrati --- celery/app/base.py | 4 ++-- t/unit/app/test_app.py | 2 ++ 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/celery/app/base.py b/celery/app/base.py index 27b0421763c..5c853af70e5 100644 --- a/celery/app/base.py +++ b/celery/app/base.py @@ -832,8 +832,8 @@ def send_task(self, name, args=None, kwargs=None, countdown=None, options = router.route( options, route_name or name, args, kwargs, task_type) - is_native_delayed_delivery = detect_quorum_queues(self, - self.producer_pool.connections.connection.transport_cls)[0] + driver_type = self.producer_pool.connections.connection.transport.driver_type + is_native_delayed_delivery = detect_quorum_queues(self, driver_type)[0] if is_native_delayed_delivery and options['queue'].exchange.type != 'direct': if eta: if isinstance(eta, str): diff --git a/t/unit/app/test_app.py b/t/unit/app/test_app.py index 479a418cf67..9092ffaaa5c 100644 --- a/t/unit/app/test_app.py +++ b/t/unit/app/test_app.py @@ -1448,6 +1448,8 @@ def test_native_delayed_delivery_countdown(self, detect_quorum_queues): exchange=exchange, routing_key='0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.1.1.1.1.0.testcelery' ) + driver_type_stub = self.app.amqp.producer_pool.connections.connection.transport.driver_type + detect_quorum_queues.assert_called_once_with(self.app, driver_type_stub) @patch('celery.app.base.detect_quorum_queues', return_value=[True, "testcelery"]) def test_native_delayed_delivery_eta_datetime(self, detect_quorum_queues): From bbe7c2ef4e9f0eab06ef49695823845c7efc0e08 Mon Sep 17 00:00:00 2001 From: Mehraz Hossain Rumman <59512321+MehrazRumman@users.noreply.github.com> Date: Thu, 13 Feb 2025 14:01:33 +0600 Subject: [PATCH 0953/1051] Reverted PR #7814 & minor code improvement (#9494) * PR #7814 reverted * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * format changed * formatted * Update celery/utils/term.py * lint fix * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * lint fix * test added for support images * test added for support images * [pre-commit.ci] auto fixes from
pre-commit.com hooks for more information, see https://pre-commit.ci * spacing fixed --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Asif Saif Uddin Co-authored-by: Omer Katz --- celery/app/log.py | 3 ++- celery/apps/worker.py | 4 ++-- celery/platforms.py | 10 +++++++++- celery/utils/term.py | 6 +++++- t/unit/utils/test_platforms.py | 13 ++++++++++--- t/unit/utils/test_term.py | 17 ++++++++++++++++- 6 files changed, 44 insertions(+), 9 deletions(-) diff --git a/celery/app/log.py b/celery/app/log.py index 4c807f4e349..a4db1057791 100644 --- a/celery/app/log.py +++ b/celery/app/log.py @@ -18,6 +18,7 @@ from celery._state import get_current_task from celery.exceptions import CDeprecationWarning, CPendingDeprecationWarning from celery.local import class_property +from celery.platforms import isatty from celery.utils.log import (ColorFormatter, LoggingProxy, get_logger, get_multiprocessing_logger, mlevel, reset_multiprocessing_logger) from celery.utils.nodenames import node_format @@ -203,7 +204,7 @@ def supports_color(self, colorize=None, logfile=None): if colorize or colorize is None: # Only use color if there's no active log file # and stderr is an actual terminal. - return logfile is None and sys.stderr.isatty() + return logfile is None and isatty(sys.stderr) return colorize def colored(self, logfile=None, enabled=None): diff --git a/celery/apps/worker.py b/celery/apps/worker.py index 8669e7d621e..5558dab8e5f 100644 --- a/celery/apps/worker.py +++ b/celery/apps/worker.py @@ -20,7 +20,7 @@ from celery import VERSION_BANNER, platforms, signals from celery.app import trace from celery.loaders.app import AppLoader -from celery.platforms import EX_FAILURE, EX_OK, check_privileges +from celery.platforms import EX_FAILURE, EX_OK, check_privileges, isatty from celery.utils import static, term from celery.utils.debug import cry from celery.utils.imports import qualname @@ -107,7 +107,7 @@ def on_after_init(self, purge=False, no_color=None, super().setup_defaults(**kwargs) self.purge = purge self.no_color = no_color - self._isatty = sys.stdout.isatty() + self._isatty = isatty(sys.stdout) self.colored = self.app.log.colored( self.logfile, enabled=not no_color if no_color is not None else no_color diff --git a/celery/platforms.py b/celery/platforms.py index a9c30a3251e..c0d0438a78e 100644 --- a/celery/platforms.py +++ b/celery/platforms.py @@ -42,7 +42,7 @@ 'DaemonContext', 'detached', 'parse_uid', 'parse_gid', 'setgroups', 'initgroups', 'setgid', 'setuid', 'maybe_drop_privileges', 'signals', 'signal_name', 'set_process_title', 'set_mp_process_title', - 'get_errno_name', 'ignore_errno', 'fd_by_path', + 'get_errno_name', 'ignore_errno', 'fd_by_path', 'isatty', ) # exitcodes @@ -95,6 +95,14 @@ SIGMAP = {getattr(_signal, name): name for name in SIGNAMES} +def isatty(fh): + """Return true if the process has a controlling terminal.""" + try: + return fh.isatty() + except AttributeError: + pass + + def pyimplementation(): """Return string identifying the current Python implementation.""" if hasattr(_platform, 'python_implementation'): diff --git a/celery/utils/term.py b/celery/utils/term.py index 53236ad549d..ba6a3215fbc 100644 --- a/celery/utils/term.py +++ b/celery/utils/term.py @@ -165,7 +165,11 @@ def __add__(self, other: object) -> str: def supports_images() -> bool: - return sys.stdin.isatty() and ITERM_PROFILE is not None + + try: + return sys.stdin.isatty() and bool(os.environ.get('ITERM_PROFILE')) + except AttributeError: + return 
False def _read_as_base64(path: str) -> str: diff --git a/t/unit/utils/test_platforms.py b/t/unit/utils/test_platforms.py index fdac88288dc..ebbcdc236c2 100644 --- a/t/unit/utils/test_platforms.py +++ b/t/unit/utils/test_platforms.py @@ -13,9 +13,9 @@ from celery.exceptions import SecurityError, SecurityWarning from celery.platforms import (ASSUMING_ROOT, ROOT_DISALLOWED, ROOT_DISCOURAGED, DaemonContext, LockFailed, Pidfile, _setgroups_hack, check_privileges, close_open_fds, create_pidlock, detached, - fd_by_path, get_fdmax, ignore_errno, initgroups, maybe_drop_privileges, parse_gid, - parse_uid, set_mp_process_title, set_pdeathsig, set_process_title, setgid, setgroups, - setuid, signals) + fd_by_path, get_fdmax, ignore_errno, initgroups, isatty, maybe_drop_privileges, + parse_gid, parse_uid, set_mp_process_title, set_pdeathsig, set_process_title, setgid, + setgroups, setuid, signals) from celery.utils.text import WhateverIO from t.unit import conftest @@ -25,6 +25,13 @@ resource = None +def test_isatty(): + fh = Mock(name='fh') + assert isatty(fh) is fh.isatty() + fh.isatty.side_effect = AttributeError() + assert not isatty(fh) + + class test_find_option_with_arg: def test_long_opt(self): diff --git a/t/unit/utils/test_term.py b/t/unit/utils/test_term.py index 2261b59f8e3..1a505ca54e5 100644 --- a/t/unit/utils/test_term.py +++ b/t/unit/utils/test_term.py @@ -1,11 +1,13 @@ +import os from base64 import b64encode from tempfile import NamedTemporaryFile +from unittest.mock import patch import pytest import t.skip from celery.utils import term -from celery.utils.term import _read_as_base64, colored, fg +from celery.utils.term import _read_as_base64, colored, fg, supports_images @t.skip.if_win32 @@ -70,3 +72,16 @@ def test_read_as_base64(self): expected_result = b64encode(test_data).decode('ascii') assert result == expected_result + + @pytest.mark.parametrize('is_tty, iterm_profile, expected', [ + (True, 'test_profile', True), + (False, 'test_profile', False), + (True, None, False), + ]) + @patch('sys.stdin.isatty') + @patch.dict(os.environ, {'ITERM_PROFILE': 'test_profile'}, clear=True) + def test_supports_images(self, mock_isatty, is_tty, iterm_profile, expected): + mock_isatty.return_value = is_tty + if iterm_profile is None: + del os.environ['ITERM_PROFILE'] + assert supports_images() == expected From ef14de924b26222f4ed47604f69378ce8f25ec52 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sun, 16 Feb 2025 01:59:09 +0200 Subject: [PATCH 0954/1051] Improved donation and sponsorship visibility (#9558) * Improved donation and sponsorship visibility * Fixed main index.rst --- README.rst | 69 ++++++++++--------- .../backends-and-brokers/redis.rst | 6 ++ docs/index.rst | 17 +++-- 3 files changed, 51 insertions(+), 41 deletions(-) diff --git a/README.rst b/README.rst index 1acac3a69fd..279f3bc5b57 100644 --- a/README.rst +++ b/README.rst @@ -12,12 +12,16 @@ Donations ========= -This project relies on your generous donations. +Open Collective +--------------- -If you are using Celery to create a commercial product, please consider becoming our `backer`_ or our `sponsor`_ to ensure Celery's future. +.. image:: https://opencollective.com/static/images/opencollectivelogo-footer-n.svg + :alt: Open Collective logo + :width: 200px -.. _`backer`: https://opencollective.com/celery#backer -.. _`sponsor`: https://opencollective.com/celery#sponsor +`Open Collective `_ is our community-powered funding platform that fuels Celery's +ongoing development. 
Your sponsorship directly supports improvements, maintenance, and innovative features that keep +Celery robust and reliable. For enterprise ============== @@ -26,20 +30,46 @@ Available as part of the Tidelift Subscription. The maintainers of ``celery`` and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source dependencies you use to build your applications. Save time, reduce risk, and improve code health, while paying the maintainers of the exact dependencies you use. `Learn more. `_ +Sponsors +======== -Sponsor -======= +Blacksmith +---------- + +.. image:: ./docs/images/blacksmith-logo-white-on-black.svg + :alt: Blacksmith logo + :width: 240px -`Dragonfly `_ is a drop-in Redis replacement that cuts costs and boosts performance. Designed to fully utilize the power of modern cloud hardware and deliver on the data demands of modern applications, Dragonfly frees developers from the limits of traditional in-memory data stores. +`Official Announcement `_ + +Upstash +------- +.. image:: https://upstash.com/logo/upstash-dark-bg.svg + :alt: Upstash logo + :width: 200px + +`Upstash `_ offers a serverless Redis database service, +providing a seamless solution for Celery users looking to leverage +serverless architectures. Upstash's serverless Redis service is designed +with an eventual consistency model and durable storage, facilitated +through a multi-tier storage architecture. + +Dragonfly +--------- .. image:: https://github.com/celery/celery/raw/main/docs/images/dragonfly.svg :alt: Dragonfly logo :width: 150px +`Dragonfly `_ is a drop-in Redis replacement that cuts costs and boosts performance. +Designed to fully utilize the power of modern cloud hardware and deliver on the data demands of modern applications, +Dragonfly frees developers from the limits of traditional in-memory data stores. +.. |oc-sponsor-1| image:: https://opencollective.com/celery/sponsor/0/avatar.svg + :target: https://opencollective.com/celery/sponsor/0/website What's a Task Queue? ==================== @@ -506,31 +536,6 @@ Thank you to all our backers! 🙏 [`Become a backer`_] .. |oc-backers| image:: https://opencollective.com/celery/backers.svg?width=890 :target: https://opencollective.com/celery#backers -Sponsors --------- - -Support this project by becoming a sponsor. Your logo will show up here with a -link to your website. [`Become a sponsor`_] - -.. _`Become a sponsor`: https://opencollective.com/celery#sponsor - -|oc-sponsor-1| |oc-sponsor-2| |oc-sponsor-3| - -.. |oc-sponsor-1| image:: https://opencollective.com/celery/sponsor/0/avatar.svg - :target: https://opencollective.com/celery/sponsor/0/website - -.. |oc-sponsor-2| image:: ./docs/images/blacksmith-logo-white-on-black.svg - :target: https://www.blacksmith.sh/ - :alt: Blacksmith.sh - :width: 240 - :height: 57 - -.. |oc-sponsor-3| image:: https://upstash.com/logo/upstash-dark-bg.svg - :target: http://upstash.com/?code=celery - :alt: Upstash - :width: 200 - :height: 57 - .. _license: License diff --git a/docs/getting-started/backends-and-brokers/redis.rst b/docs/getting-started/backends-and-brokers/redis.rst index 997431b895f..11d42544ec2 100644 --- a/docs/getting-started/backends-and-brokers/redis.rst +++ b/docs/getting-started/backends-and-brokers/redis.rst @@ -160,6 +160,12 @@ through a multi-tier storage architecture. Integration with Celery is straightforward as demonstrated in an `example provided by Upstash `_. 
+Dragonfly +--------- +`Dragonfly `_ is a drop-in Redis replacement that cuts costs and boosts performance. +Designed to fully utilize the power of modern cloud hardware and deliver on the data demands of modern applications, +Dragonfly frees developers from the limits of traditional in-memory data stores. + .. _redis-caveats: Caveats diff --git a/docs/index.rst b/docs/index.rst index 299fb5749f2..96998428397 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -15,15 +15,14 @@ or :ref:`our mailing-list `. Celery is Open Source and licensed under the `BSD License`_. -Donations -========= - -This project relies on your generous donations. - -If you are using Celery to create a commercial product, please consider becoming our `backer`_ or our `sponsor`_ to ensure Celery's future. - -.. _`backer`: https://opencollective.com/celery#backer -.. _`sponsor`: https://opencollective.com/celery#sponsor +.. image:: https://opencollective.com/static/images/opencollectivelogo-footer-n.svg + :target: https://opencollective.com/celery + :alt: Open Collective logo + :width: 240px + +`Open Collective `_ is our community-powered funding platform that fuels Celery's +ongoing development. Your sponsorship directly supports improvements, maintenance, and innovative features that keep +Celery robust and reliable. Getting Started =============== From 8699a4f051776865116302391770f225a222ac16 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sun, 16 Feb 2025 02:03:53 +0200 Subject: [PATCH 0955/1051] Updated the Getting Help section, replacing deprecated with new resources (#9559) --- docs/includes/resources.txt | 31 +++++++++++-------------------- docs/index.rst | 3 +-- 2 files changed, 12 insertions(+), 22 deletions(-) diff --git a/docs/includes/resources.txt b/docs/includes/resources.txt index 91ef547e9d2..23e309513c8 100644 --- a/docs/includes/resources.txt +++ b/docs/includes/resources.txt @@ -3,25 +3,23 @@ Getting Help ============ -.. _mailing-list: +.. warning:: -Mailing list ------------- + Our `Google Groups account `_ has been + `compromised `_. -For discussions about the usage, development, and future of Celery, -please join the `celery-users`_ mailing list. +.. _social-media: -.. _`celery-users`: https://groups.google.com/group/celery-users/ - -.. _irc-channel: +Social Media +============ -IRC ---- +Follow us on social media: -Come chat with us on IRC. The **#celery** channel is located at the `Libera Chat`_ -network. +- `X `_ +- `LinkedIn `_ -.. _`Libera Chat`: https://libera.chat/ +These accounts will (mostly) mirror each other, but we encourage you to +follow us on all platforms to ensure you don't miss any important updates. .. _bug-tracker: @@ -31,13 +29,6 @@ Bug tracker If you have any suggestions, bug reports, or annoyances please report them to our issue tracker at https://github.com/celery/celery/issues/ -.. _wiki: - -Wiki -==== - -https://github.com/celery/celery/wiki - .. _contributing-short: Contributing diff --git a/docs/index.rst b/docs/index.rst index 96998428397..107d96e019c 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -10,8 +10,7 @@ It's a task queue with focus on real-time processing, while also supporting task scheduling. Celery has a large and diverse community of users and contributors, -you should come join us :ref:`on IRC ` -or :ref:`our mailing-list `. +don't hesitate to ask questions or :ref:`get involved `. Celery is Open Source and licensed under the `BSD License`_. 
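The next patch simplifies the Django example project so that Celery resolves Django settings lazily by string path instead of importing ``django.conf.settings`` at module import time. For context, a minimal sketch of how the example module reads after that patch; the ``DJANGO_SETTINGS_MODULE`` boilerplate and the ``Celery('proj')`` constructor are assumed from the standard Django example layout, since the hunk below elides them:

```python
# Sketch of examples/django/proj/celery.py after the following patch.
# Lines marked "assumed" are standard boilerplate not shown in the hunk.
import os

from celery import Celery

# Assumed: set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'proj.settings')

# Assumed: the example's app instance.
app = Celery('proj')

# Passing the string path 'django.conf:settings' keeps the lookup lazy, so
# this module no longer needs `from django.conf import settings` at import
# time, which is exactly what the patch removes.
app.config_from_object('django.conf:settings', namespace='CELERY')

# Load task modules from all registered Django apps.
app.autodiscover_tasks()
```
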
From 7d5157343cc919b0f94d602ff1d2c037b38815a3 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 18 Feb 2025 21:49:52 +0200 Subject: [PATCH 0956/1051] Fixed django example (#9562) --- examples/django/proj/celery.py | 4 +--- examples/django/proj/urls.py | 3 ++- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/examples/django/proj/celery.py b/examples/django/proj/celery.py index 182da54fb55..ec3354dcdf3 100644 --- a/examples/django/proj/celery.py +++ b/examples/django/proj/celery.py @@ -1,7 +1,5 @@ import os -from django.conf import settings - from celery import Celery # Set the default Django settings module for the 'celery' program. @@ -13,7 +11,7 @@ # the configuration object to child processes. # - namespace='CELERY' means all celery-related configuration keys # should have a `CELERY_` prefix. -app.config_from_object(f'django.conf:{settings.__name__}', namespace='CELERY') +app.config_from_object('django.conf:settings', namespace='CELERY') # Load task modules from all registered Django apps. app.autodiscover_tasks() diff --git a/examples/django/proj/urls.py b/examples/django/proj/urls.py index 5f67c27b660..bfbc09114ee 100644 --- a/examples/django/proj/urls.py +++ b/examples/django/proj/urls.py @@ -1,4 +1,5 @@ -from django.urls import handler404, handler500, include, url # noqa +from django.conf.urls import handler404, handler500 # noqa +from django.urls import include, path # noqa # Uncomment the next two lines to enable the admin: # from django.contrib import admin From 60b5644a10d56024a2d66b6c2af7169370d51d8d Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 20 Feb 2025 01:37:12 +0200 Subject: [PATCH 0957/1051] Bump Kombu to v5.5.0rc3 (#9564) --- requirements/default.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/default.txt b/requirements/default.txt index bed03e2bd56..dcec525e00a 100644 --- a/requirements/default.txt +++ b/requirements/default.txt @@ -1,5 +1,5 @@ billiard>=4.2.1,<5.0 -kombu>=5.5.0rc2,<6.0 +kombu>=5.5.0rc3,<6.0 vine>=5.1.0,<6.0 click>=8.1.2,<9.0 click-didyoumean>=0.3.0 From f5fa8378e31b3a59f2b83836e34a08c79e65a6b2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 20 Feb 2025 01:48:37 +0200 Subject: [PATCH 0958/1051] Bump ephem from 4.1.6 to 4.2 (#9565) Bumps [ephem](https://github.com/brandon-rhodes/pyephem) from 4.1.6 to 4.2. - [Commits](https://github.com/brandon-rhodes/pyephem/compare/4.1.6...4.2) --- updated-dependencies: - dependency-name: ephem dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/extras/solar.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/solar.txt b/requirements/extras/solar.txt index f0d13a35bb3..60b63fb7f24 100644 --- a/requirements/extras/solar.txt +++ b/requirements/extras/solar.txt @@ -1 +1 @@ -ephem==4.1.6; platform_python_implementation!="PyPy" +ephem==4.2; platform_python_implementation!="PyPy" From b7abaac239f253b52a527003eccd363ee878ab9e Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Fri, 21 Feb 2025 13:56:30 +0200 Subject: [PATCH 0959/1051] Bump pytest-celery to 1.2.0 (#9568) --- requirements/extras/pytest.txt | 2 +- requirements/test.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/extras/pytest.txt b/requirements/extras/pytest.txt index 6f0f7a19896..01972fb128f 100644 --- a/requirements/extras/pytest.txt +++ b/requirements/extras/pytest.txt @@ -1 +1 @@ -pytest-celery[all]>=1.1.3 +pytest-celery[all]>=1.2.0 diff --git a/requirements/test.txt b/requirements/test.txt index f115c70ba78..c7f9a07dff1 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,5 +1,5 @@ pytest==8.3.4 -pytest-celery[all]>=1.1.3 +pytest-celery[all]>=1.2.0 pytest-rerunfailures>=14.0,<15.0; python_version >= "3.8" and python_version < "3.9" pytest-rerunfailures>=15.0; python_version >= "3.9" and python_version < "4.0" pytest-subtests<0.14.0; python_version < "3.9" From 9bf05461dc8de9cb88f4279799e90e1dc0688196 Mon Sep 17 00:00:00 2001 From: James Meakin <12661555+jmsmkn@users.noreply.github.com> Date: Fri, 21 Feb 2025 13:29:44 +0100 Subject: [PATCH 0960/1051] Remove dependency on `pycurl` (#9526) * Remove dependency on `pycurl` The dependency on `pycurl` was removed from `kombu[sqs]` in https://github.com/celery/kombu/pull/2134, so is no longer required here. 
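As a usage sketch (not part of this patch, and assuming a kombu release that includes the linked change), installing the SQS extra now pulls its HTTP stack through `kombu[sqs]` alone, with no pycurl compile step:

```console
$ pip install "celery[sqs]"   # boto3 and urllib3 arrive via kombu[sqs]; no pycurl build
```
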
See #3619 * Update test-ci-default.txt --------- Co-authored-by: Asif Saif Uddin Co-authored-by: Tomer Nosrati --- requirements/extras/sqs.txt | 2 -- requirements/test-ci-default.txt | 3 +-- 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/requirements/extras/sqs.txt b/requirements/extras/sqs.txt index 43ee109e8c6..4160a304451 100644 --- a/requirements/extras/sqs.txt +++ b/requirements/extras/sqs.txt @@ -1,5 +1,3 @@ boto3>=1.26.143 -pycurl>=7.43.0.5,<7.45.4; sys_platform != 'win32' and platform_python_implementation=="CPython" and python_version < "3.9" -pycurl>=7.45.4; sys_platform != 'win32' and platform_python_implementation=="CPython" and python_version >= "3.9" urllib3>=1.26.16 kombu[sqs]>=5.3.4 diff --git a/requirements/test-ci-default.txt b/requirements/test-ci-default.txt index 78994fa8e45..e689866e245 100644 --- a/requirements/test-ci-default.txt +++ b/requirements/test-ci-default.txt @@ -21,5 +21,4 @@ git+https://github.com/celery/kombu.git # SQS dependencies other than boto -pycurl>=7.43.0.5,<7.45.4; sys_platform != 'win32' and platform_python_implementation=="CPython" and python_version < "3.9" -pycurl>=7.45.4; sys_platform != 'win32' and platform_python_implementation=="CPython" and python_version >= "3.9" +urllib3>=1.26.16 From 22f7d1f86fcfeb6e5b9301f2ee7d3e6ba1f9f03f Mon Sep 17 00:00:00 2001 From: William David Edwards Date: Sun, 23 Feb 2025 15:41:32 +0100 Subject: [PATCH 0961/1051] Set TestWorkController.__test__ (#9574) When importing `TestWorkController` into test files, pytest considers it a test class, causing the following warning: ``` ../../../usr/local/lib/python3.11/site-packages/celery/contrib/testing/worker.py:30: 10 warnings /usr/local/lib/python3.11/site-packages/celery/contrib/testing/worker.py:30: PytestCollectionWarning: cannot collect test class 'TestWorkController' because it has a __init__ constructor (from: tests/feature_tests/app/api/api_v1/controllers/public/test_certificate_managers.py) class TestWorkController(worker.WorkController): ``` Importing this class is common for type annotating, e.g.: ``` test_module.py from celery.contrib.testing.worker import TestWorkController def test_stuff(celery_worker: TestWorkController): ... ``` Prevent pytest from discovering this class by setting `__test__ = False`. 
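As a generic illustration of the pytest convention this relies on (hypothetical helper class, not part of the patch):

```python
# Any class whose name matches pytest's collection rules can opt out the
# same way by setting the class attribute __test__ to False.
class TestHelperNotACase:
    __test__ = False  # pytest skips collecting this class despite its name

    def run(self):
        return 'helper'
```
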
Documentation: https://docs.pytest.org/en/stable/example/pythoncollection.html#customizing-test-collection --- celery/contrib/testing/worker.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/celery/contrib/testing/worker.py b/celery/contrib/testing/worker.py index cb418b8e87a..46eac75fd64 100644 --- a/celery/contrib/testing/worker.py +++ b/celery/contrib/testing/worker.py @@ -30,6 +30,10 @@ class TestWorkController(worker.WorkController): """Worker that can synchronize on being fully started.""" + # When this class is imported in pytest files, prevent pytest from thinking + # this is a test class + __test__ = False + logger_queue = None def __init__(self, *args, **kwargs): From 35cca09a8bf7fc076b81dbb182867e96284fd025 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 24 Feb 2025 02:24:09 +0200 Subject: [PATCH 0962/1051] Fixed bug when revoking by stamped headers a stamp that does not exist (#9575) --- celery/app/control.py | 3 ++- t/integration/test_tasks.py | 10 ++++++++++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/celery/app/control.py b/celery/app/control.py index 73b5162e851..603d930a542 100644 --- a/celery/app/control.py +++ b/celery/app/control.py @@ -527,7 +527,8 @@ def revoke_by_stamped_headers(self, headers, destination=None, terminate=False, if result: for host in result: for response in host.values(): - task_ids.update(response['ok']) + if isinstance(response['ok'], set): + task_ids.update(response['ok']) if task_ids: return self.revoke(list(task_ids), destination=destination, terminate=terminate, signal=signal, **kwargs) diff --git a/t/integration/test_tasks.py b/t/integration/test_tasks.py index 76c46fd3f65..1b175a01320 100644 --- a/t/integration/test_tasks.py +++ b/t/integration/test_tasks.py @@ -318,6 +318,16 @@ def on_signature(self, sig, **headers) -> dict: assert result.successful() is False worker_state.revoked_stamps.clear() + def test_revoke_by_stamped_headers_no_match(self, manager): + response = manager.app.control.revoke_by_stamped_headers( + {"myheader": ["myvalue"]}, + terminate=False, + reply=True, + ) + + expected_response = "headers {'myheader': ['myvalue']} flagged as revoked, but not terminated" + assert response[0][list(response[0].keys())[0]]["ok"] == expected_response + @flaky def test_wrong_arguments(self, manager): """Tests that proper exceptions are raised when task is called with wrong arguments.""" From 821e6557ae18a5ec163f7e4da5a1b50f3e047937 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 24 Feb 2025 17:39:17 +0200 Subject: [PATCH 0963/1051] Canvas Stamping Doc Fixes (#9578) * Doc Typo fix: revoke_by_stamped_header -> revoke_by_stamped_headers * Doc Fix: Added reference to the stamping feature in the apply_async header's section --- celery/app/task.py | 2 ++ docs/userguide/canvas.rst | 2 ++ docs/userguide/workers.rst | 24 ++++++++++++------------ 3 files changed, 16 insertions(+), 12 deletions(-) diff --git a/celery/app/task.py b/celery/app/task.py index 951c75824b7..90ba8552d4f 100644 --- a/celery/app/task.py +++ b/celery/app/task.py @@ -535,6 +535,8 @@ def apply_async(self, args=None, kwargs=None, task_id=None, producer=None, publisher (kombu.Producer): Deprecated alias to ``producer``. headers (Dict): Message headers to be included in the message. + The headers can be used as an overlay for custom labeling + using the :ref:`canvas-stamping` feature. Returns: celery.result.AsyncResult: Promise of future evaluation. 
diff --git a/docs/userguide/canvas.rst b/docs/userguide/canvas.rst index 3268e93367a..8d510a9c2a0 100644 --- a/docs/userguide/canvas.rst +++ b/docs/userguide/canvas.rst @@ -1174,6 +1174,8 @@ of one: This means that the first task will have a countdown of one second, the second task a countdown of two seconds, and so on. +.. _canvas-stamping: + Stamping ======== diff --git a/docs/userguide/workers.rst b/docs/userguide/workers.rst index 1f2cef97c83..01d6491d72b 100644 --- a/docs/userguide/workers.rst +++ b/docs/userguide/workers.rst @@ -613,13 +613,13 @@ Note that remote control commands must be working for revokes to work. Remote control commands are only supported by the RabbitMQ (amqp) and Redis at this point. -.. control:: revoke_by_stamped_header +.. control:: revoke_by_stamped_headers -``revoke_by_stamped_header``: Revoking tasks by their stamped headers ---------------------------------------------------------------------- +``revoke_by_stamped_headers``: Revoking tasks by their stamped headers +---------------------------------------------------------------------- :pool support: all, terminate only supported by prefork and eventlet :broker support: *amqp, redis* -:command: :program:`celery -A proj control revoke_by_stamped_header ` +:command: :program:`celery -A proj control revoke_by_stamped_headers ` This command is similar to :meth:`~@control.revoke`, but instead of specifying the task id(s), you specify the stamped header(s) as key-value pair(s), @@ -641,11 +641,11 @@ and each task that has a stamped header matching the key-value pair(s) will be r .. code-block:: pycon - >>> app.control.revoke_by_stamped_header({'header': 'value'}) + >>> app.control.revoke_by_stamped_headers({'header': 'value'}) - >>> app.control.revoke_by_stamped_header({'header': 'value'}, terminate=True) + >>> app.control.revoke_by_stamped_headers({'header': 'value'}, terminate=True) - >>> app.control.revoke_by_stamped_header({'header': 'value'}, terminate=True, signal='SIGKILL') + >>> app.control.revoke_by_stamped_headers({'header': 'value'}, terminate=True, signal='SIGKILL') Revoking multiple tasks by stamped headers @@ -653,14 +653,14 @@ Revoking multiple tasks by stamped headers .. versionadded:: 5.3 -The ``revoke_by_stamped_header`` method also accepts a list argument, where it will revoke +The ``revoke_by_stamped_headers`` method also accepts a list argument, where it will revoke by several headers or several values. **Example** .. code-block:: pycon - >> app.control.revoke_by_stamped_header({ + >> app.control.revoke_by_stamped_headers({ ... 'header_A': 'value_1', ... 'header_B': ['value_2', 'value_3'], }) @@ -672,11 +672,11 @@ and all of the tasks that have a stamped header ``header_B`` with values ``value .. 
code-block:: console - $ celery -A proj control revoke_by_stamped_header stamped_header_key_A=stamped_header_value_1 stamped_header_key_B=stamped_header_value_2 + $ celery -A proj control revoke_by_stamped_headers stamped_header_key_A=stamped_header_value_1 stamped_header_key_B=stamped_header_value_2 - $ celery -A proj control revoke_by_stamped_header stamped_header_key_A=stamped_header_value_1 stamped_header_key_B=stamped_header_value_2 --terminate + $ celery -A proj control revoke_by_stamped_headers stamped_header_key_A=stamped_header_value_1 stamped_header_key_B=stamped_header_value_2 --terminate - $ celery -A proj control revoke_by_stamped_header stamped_header_key_A=stamped_header_value_1 stamped_header_key_B=stamped_header_value_2 --terminate --signal=SIGKILL + $ celery -A proj control revoke_by_stamped_headers stamped_header_key_A=stamped_header_value_1 stamped_header_key_B=stamped_header_value_2 --terminate --signal=SIGKILL .. _worker-time-limits: From 62b368404f6aa1de4220e2d795992051fa134cf2 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 24 Feb 2025 23:25:12 +0200 Subject: [PATCH 0964/1051] Bugfix: Chord with a chord in header doesn't invoke error callback on inner chord header failure (default config) (#9580) * Reproduced bug * Solved bug * Added missing app=self.app in related unit tests --- celery/canvas.py | 7 +++++ t/integration/test_canvas.py | 54 +++++++++++++++++++++++++++++++++++ t/unit/tasks/test_canvas.py | 6 ++-- t/unit/tasks/test_stamping.py | 4 +-- 4 files changed, 66 insertions(+), 5 deletions(-) diff --git a/celery/canvas.py b/celery/canvas.py index 9f4d2f0ce74..da395c1390e 100644 --- a/celery/canvas.py +++ b/celery/canvas.py @@ -2307,6 +2307,13 @@ def link_error(self, errback): CPendingDeprecationWarning ) + # Edge case for nested chords in the header + for task in maybe_list(self.tasks) or []: + if isinstance(task, chord): + # Let the nested chord do the error linking itself on its + # header and body where needed, based on the current configuration + task.link_error(errback) + self.body.link_error(errback) return errback diff --git a/t/integration/test_canvas.py b/t/integration/test_canvas.py index d2474fa2351..ed838dc6730 100644 --- a/t/integration/test_canvas.py +++ b/t/integration/test_canvas.py @@ -2862,6 +2862,60 @@ def test_chord_body_chain_child_replaced_with_chain_last(self, manager): res_obj = orig_sig.delay() assert res_obj.get(timeout=TIMEOUT) == [42] + def test_nested_chord_header_link_error(self, manager, subtests): + try: + manager.app.backend.ensure_chords_allowed() + except NotImplementedError as e: + raise pytest.skip(e.args[0]) + + if not manager.app.conf.result_backend.startswith("redis"): + raise pytest.skip("Requires redis result backend.") + redis_connection = get_redis_connection() + + errback_msg = "errback called" + errback_key = "echo_errback" + errback_sig = redis_echo.si(errback_msg, redis_key=errback_key) + + body_msg = "chord body called" + body_key = "echo_body" + body_sig = redis_echo.si(body_msg, redis_key=body_key) + + redis_connection.delete(errback_key, body_key) + + manager.app.conf.task_allow_error_cb_on_chord_header = False + + chord_inner = chord( + [identity.si("t1"), fail.si()], + identity.si("t2 (body)"), + ) + chord_outer = chord( + group( + [ + identity.si("t3"), + chord_inner, + ], + ), + body_sig, + ) + chord_outer.link_error(errback_sig) + chord_outer.delay() + + with subtests.test(msg="Confirm the body was not executed"): + with pytest.raises(TimeoutError): + # confirm the chord body was not called + 
await_redis_echo((body_msg,), redis_key=body_key, timeout=10) + # Double check + assert not redis_connection.exists(body_key), "Chord body was called when it should have not" + + with subtests.test(msg="Confirm only one errback was called"): + await_redis_echo((errback_msg,), redis_key=errback_key, timeout=10) + with pytest.raises(TimeoutError): + # Double check + await_redis_echo((errback_msg,), redis_key=errback_key, timeout=10) + + # Cleanup + redis_connection.delete(errback_key) + def test_enabling_flag_allow_error_cb_on_chord_header(self, manager, subtests): """ Test that the flag allow_error_callback_on_chord_header works as diff --git a/t/unit/tasks/test_canvas.py b/t/unit/tasks/test_canvas.py index 1f901376205..6d287848c31 100644 --- a/t/unit/tasks/test_canvas.py +++ b/t/unit/tasks/test_canvas.py @@ -1746,7 +1746,7 @@ def test_flag_allow_error_cb_on_chord_header_various_header_types(self): group(signature('t'), signature('t')) ] for chord_header in headers: - c = chord(chord_header, signature('t')) + c = chord(chord_header, signature('t'), app=self.app) sig = signature('t') errback = c.link_error(sig) assert errback == sig @@ -1754,7 +1754,7 @@ def test_flag_allow_error_cb_on_chord_header_various_header_types(self): @pytest.mark.usefixtures('depends_on_current_app') def test_flag_allow_error_cb_on_chord_header_with_dict_callback(self): self.app.conf.task_allow_error_cb_on_chord_header = True - c = chord(group(signature('th1'), signature('th2')), signature('tbody')) + c = chord(group(signature('th1'), signature('th2')), signature('tbody'), app=self.app) errback_dict = dict(signature('tcb')) errback = c.link_error(errback_dict) assert errback == errback_dict @@ -1783,7 +1783,7 @@ def test_chord_upgrade_on_chaining(self): def test_link_error_on_chord_header(self, header): """ Test that link_error on a chord also links the header """ self.app.conf.task_allow_error_cb_on_chord_header = True - c = chord(header, signature('body')) + c = chord(header, signature('body'), app=self.app) err = signature('err') errback = c.link_error(err) assert errback == err diff --git a/t/unit/tasks/test_stamping.py b/t/unit/tasks/test_stamping.py index 3d139abb9e9..1c8da859dd7 100644 --- a/t/unit/tasks/test_stamping.py +++ b/t/unit/tasks/test_stamping.py @@ -1300,13 +1300,13 @@ def tasks(): with subtests.test("chord header"): self.app.conf.task_allow_error_cb_on_chord_header = True - canvas = chord(tasks(), self.identity.si("body")) + canvas = chord(tasks(), self.identity.si("body"), app=self.app) canvas.link_error(s("group_link_error")) canvas.stamp(CustomStampingVisitor()) with subtests.test("chord body"): self.app.conf.task_allow_error_cb_on_chord_header = False - canvas = chord(tasks(), self.identity.si("body")) + canvas = chord(tasks(), self.identity.si("body"), app=self.app) canvas.link_error(s("group_link_error")) canvas.stamp(CustomStampingVisitor()) From e73b71ed2090e83765b14162cadde771c6b520ed Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 25 Feb 2025 18:13:27 +0200 Subject: [PATCH 0965/1051] Prepare for (pre) release: v5.5.0rc5 (#9582) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Bump version: 5.5.0rc4 → 5.5.0rc5 * Added Changelog for v5.5.0rc5 --- .bumpversion.cfg | 2 +- Changelog.rst | 163 +++++++++++++++++++++++++++++++++ README.rst | 2 +- celery/__init__.py | 2 +- docs/history/changelog-5.5.rst | 163 +++++++++++++++++++++++++++++++++ docs/includes/introduction.txt | 2 +- 6 files changed, 330 insertions(+), 4 deletions(-) diff --git 
a/.bumpversion.cfg b/.bumpversion.cfg index 149c341155a..4baba791b06 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 5.5.0rc4 +current_version = 5.5.0rc5 commit = True tag = True parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P[a-z\d]+)? diff --git a/Changelog.rst b/Changelog.rst index 9357c597f9c..bbb0ee81802 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -8,6 +8,169 @@ This document contains change notes for bugfix & new features in the main branch & 5.5.x series, please see :ref:`whatsnew-5.5` for an overview of what's new in Celery 5.5. +.. _version-5.5.0rc5: + +5.5.0rc5 +======== + +:release-date: 2025-02-25 +:release-by: Tomer Nosrati + +Celery v5.5.0 Release Candidate 5 is now available for testing. +Please help us test this version and report any issues. + +Key Highlights +~~~~~~~~~~~~~~ + +See :ref:`whatsnew-5.5` or read the main highlights below. + +Using Kombu 5.5.0rc3 +-------------------- + +The minimum required Kombu version has been bumped to 5.5.0. +Kombu is currently at 5.5.0rc3. + +Complete Quorum Queues Support +------------------------------ + +A completely new ETA mechanism was developed to allow full support with RabbitMQ Quorum Queues. + +After upgrading to this version, please share your feedback on the quorum queues support. + +Relevant Issues: +`#9207 `_, +`#6067 `_ + +- New :ref:`documentation `. +- New :setting:`broker_native_delayed_delivery_queue_type` configuration option. + +New support for Google Pub/Sub transport +---------------------------------------- + +After upgrading to this version, please share your feedback on the Google Pub/Sub transport support. + +Relevant Issues: +`#9351 `_ + +Python 3.13 Improved Support +---------------------------- + +Additional dependencies have been migrated successfully to Python 3.13, including Kombu and py-amqp. + +Soft Shutdown +------------- + +The soft shutdown is a new mechanism in Celery that sits between the warm shutdown and the cold shutdown. +It sets a time limited "warm shutdown" period, during which the worker will continue to process tasks that are already running. +After the soft shutdown ends, the worker will initiate a graceful cold shutdown, stopping all tasks and exiting. + +The soft shutdown is disabled by default, and can be enabled by setting the new configuration option :setting:`worker_soft_shutdown_timeout`. +If a worker is not running any task when the soft shutdown initiates, it will skip the warm shutdown period and proceed directly to the cold shutdown +unless the new configuration option :setting:`worker_enable_soft_shutdown_on_idle` is set to True. This is useful for workers +that are idle, waiting on ETA tasks to be executed that still want to enable the soft shutdown anyways. + +The soft shutdown can replace the cold shutdown when using a broker with a visibility timeout mechanism, like :ref:`Redis ` +or :ref:`SQS `, to enable a more graceful cold shutdown procedure, allowing the worker enough time to re-queue tasks that were not +completed (e.g., ``Restoring 1 unacknowledged message(s)``) by resetting the visibility timeout of the unacknowledged messages just before +the worker exits completely. + +After upgrading to this version, please share your feedback on the new Soft Shutdown mechanism. + +Relevant Issues: +`#9213 `_, +`#9231 `_, +`#9238 `_ + +- New :ref:`documentation ` for each shutdown type. +- New :setting:`worker_soft_shutdown_timeout` configuration option. +- New :setting:`worker_enable_soft_shutdown_on_idle` configuration option. 
+ +REMAP_SIGTERM +------------- + +The ``REMAP_SIGTERM`` "hidden feature" has been tested, :ref:`documented ` and is now officially supported. +This feature allows users to remap the SIGTERM signal to SIGQUIT, to initiate a soft or a cold shutdown using :sig:`TERM` +instead of :sig:`QUIT`. + +Pydantic Support +---------------- + +This release introduces support for Pydantic models in Celery tasks. +For more info, see the new pydantic example and PR `#9023 `_ by @mathiasertl. + +After upgrading to this version, please share your feedback on the new Pydantic support. + +Redis Broker Stability Improvements +----------------------------------- +The root cause of the Redis broker instability issue has been `identified and resolved `_ +in the v5.4.0 release of Kombu, which should resolve the disconnections bug and offer additional improvements. + +After upgrading to this version, please share your feedback on the Redis broker stability. + +Relevant Issues: +`#7276 `_, +`#8091 `_, +`#8030 `_, +`#8384 `_ + +Quorum Queues Initial Support +----------------------------- +This release introduces the initial support for Quorum Queues with Celery. + +See new configuration options for more details: + +- :setting:`task_default_queue_type` +- :setting:`worker_detect_quorum_queues` + +After upgrading to this version, please share your feedback on the Quorum Queues support. + +Relevant Issues: +`#6067 `_, +`#9121 `_ + +What's Changed +~~~~~~~~~~~~~~ + +- Bump mypy from 1.13.0 to 1.14.0 (#9476) +- Fix cassandra backend port settings not working (#9465) +- Unroll group when a group with a single item is chained using the | operator (#9456) +- fix(django): catch the right error when trying to close db connection (#9392) +- Replacing a task with a chain which contains a group now returns a result instead of hanging (#9484) +- Avoid using a group of one as it is now unrolled into a chain (#9510) +- Link to the correct IRC network (#9509) +- Bump pytest-github-actions-annotate-failures from 0.2.0 to 0.3.0 (#9504) +- Update canvas.rst to fix output result from chain object (#9502) +- Unauthorized Changes Cleanup (#9528) +- [RE-APPROVED] fix(django): catch the right error when trying to close db connection (#9529) +- [RE-APPROVED] Link to the correct IRC network (#9531) +- [RE-APPROVED] Update canvas.rst to fix output result from chain object (#9532) +- Update test-ci-base.txt (#9539) +- Update install-pyenv.sh (#9540) +- Update elasticsearch requirement from <=8.17.0 to <=8.17.1 (#9518) +- Bump google-cloud-firestore from 2.19.0 to 2.20.0 (#9493) +- Bump mypy from 1.14.0 to 1.14.1 (#9483) +- Update elastic-transport requirement from <=8.15.1 to <=8.17.0 (#9490) +- Update Dockerfile by adding missing Python version 3.13 (#9549) +- Fix typo for default of sig (#9495) +- fix(crontab): resolve constructor type conflicts (#9551) +- worker_max_memory_per_child: kilobyte is 1024 bytes (#9553) +- Fix formatting in quorum queue docs (#9555) +- Bump cryptography from 44.0.0 to 44.0.1 (#9556) +- Fix the send_task method when detecting if the native delayed delivery approach is available (#9552) +- Reverted PR #7814 & minor code improvement (#9494) +- Improved donation and sponsorship visibility (#9558) +- Updated the Getting Help section, replacing deprecated with new resources (#9559) +- Fixed django example (#9562) +- Bump Kombu to v5.5.0rc3 (#9564) +- Bump ephem from 4.1.6 to 4.2 (#9565) +- Bump pytest-celery to v1.2.0 (#9568) +- Remove dependency on `pycurl` (#9526) +- Set TestWorkController.__test__ (#9574) +- Fixed bug 
when revoking by stamped headers a stamp that does not exist (#9575) +- Canvas Stamping Doc Fixes (#9578) +- Bugfix: Chord with a chord in header doesn't invoke error callback on inner chord header failure (default config) (#9580) +- Prepare for (pre) release: v5.5.0rc5 (#9582) + .. _version-5.5.0rc4: 5.5.0rc4 diff --git a/README.rst b/README.rst index 279f3bc5b57..716d12c9f24 100644 --- a/README.rst +++ b/README.rst @@ -2,7 +2,7 @@ |build-status| |coverage| |license| |wheel| |semgrep| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge| -:Version: 5.5.0rc4 (immunity) +:Version: 5.5.0rc5 (immunity) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ diff --git a/celery/__init__.py b/celery/__init__.py index 0557678fc68..dfecfd72c19 100644 --- a/celery/__init__.py +++ b/celery/__init__.py @@ -17,7 +17,7 @@ SERIES = 'immunity' -__version__ = '5.5.0rc4' +__version__ = '5.5.0rc5' __author__ = 'Ask Solem' __contact__ = 'auvipy@gmail.com' __homepage__ = 'https://docs.celeryq.dev/' diff --git a/docs/history/changelog-5.5.rst b/docs/history/changelog-5.5.rst index e62f3997dbe..a8042cd7a06 100644 --- a/docs/history/changelog-5.5.rst +++ b/docs/history/changelog-5.5.rst @@ -8,6 +8,169 @@ This document contains change notes for bugfix & new features in the main branch & 5.5.x series, please see :ref:`whatsnew-5.5` for an overview of what's new in Celery 5.5. +.. _version-5.5.0rc5: + +5.5.0rc5 +======== + +:release-date: 2025-02-25 +:release-by: Tomer Nosrati + +Celery v5.5.0 Release Candidate 5 is now available for testing. +Please help us test this version and report any issues. + +Key Highlights +~~~~~~~~~~~~~~ + +See :ref:`whatsnew-5.5` or read the main highlights below. + +Using Kombu 5.5.0rc3 +-------------------- + +The minimum required Kombu version has been bumped to 5.5.0. +Kombu is currently at 5.5.0rc3. + +Complete Quorum Queues Support +------------------------------ + +A completely new ETA mechanism was developed to allow full support with RabbitMQ Quorum Queues. + +After upgrading to this version, please share your feedback on the quorum queues support. + +Relevant Issues: +`#9207 `_, +`#6067 `_ + +- New :ref:`documentation `. +- New :setting:`broker_native_delayed_delivery_queue_type` configuration option. + +New support for Google Pub/Sub transport +---------------------------------------- + +After upgrading to this version, please share your feedback on the Google Pub/Sub transport support. + +Relevant Issues: +`#9351 `_ + +Python 3.13 Improved Support +---------------------------- + +Additional dependencies have been migrated successfully to Python 3.13, including Kombu and py-amqp. + +Soft Shutdown +------------- + +The soft shutdown is a new mechanism in Celery that sits between the warm shutdown and the cold shutdown. +It sets a time limited "warm shutdown" period, during which the worker will continue to process tasks that are already running. +After the soft shutdown ends, the worker will initiate a graceful cold shutdown, stopping all tasks and exiting. + +The soft shutdown is disabled by default, and can be enabled by setting the new configuration option :setting:`worker_soft_shutdown_timeout`. +If a worker is not running any task when the soft shutdown initiates, it will skip the warm shutdown period and proceed directly to the cold shutdown +unless the new configuration option :setting:`worker_enable_soft_shutdown_on_idle` is set to True. 
This is useful for workers +that are idle, waiting on ETA tasks to be executed that still want to enable the soft shutdown anyways. + +The soft shutdown can replace the cold shutdown when using a broker with a visibility timeout mechanism, like :ref:`Redis ` +or :ref:`SQS `, to enable a more graceful cold shutdown procedure, allowing the worker enough time to re-queue tasks that were not +completed (e.g., ``Restoring 1 unacknowledged message(s)``) by resetting the visibility timeout of the unacknowledged messages just before +the worker exits completely. + +After upgrading to this version, please share your feedback on the new Soft Shutdown mechanism. + +Relevant Issues: +`#9213 `_, +`#9231 `_, +`#9238 `_ + +- New :ref:`documentation ` for each shutdown type. +- New :setting:`worker_soft_shutdown_timeout` configuration option. +- New :setting:`worker_enable_soft_shutdown_on_idle` configuration option. + +REMAP_SIGTERM +------------- + +The ``REMAP_SIGTERM`` "hidden feature" has been tested, :ref:`documented ` and is now officially supported. +This feature allows users to remap the SIGTERM signal to SIGQUIT, to initiate a soft or a cold shutdown using :sig:`TERM` +instead of :sig:`QUIT`. + +Pydantic Support +---------------- + +This release introduces support for Pydantic models in Celery tasks. +For more info, see the new pydantic example and PR `#9023 `_ by @mathiasertl. + +After upgrading to this version, please share your feedback on the new Pydantic support. + +Redis Broker Stability Improvements +----------------------------------- +The root cause of the Redis broker instability issue has been `identified and resolved `_ +in the v5.4.0 release of Kombu, which should resolve the disconnections bug and offer additional improvements. + +After upgrading to this version, please share your feedback on the Redis broker stability. + +Relevant Issues: +`#7276 `_, +`#8091 `_, +`#8030 `_, +`#8384 `_ + +Quorum Queues Initial Support +----------------------------- +This release introduces the initial support for Quorum Queues with Celery. + +See new configuration options for more details: + +- :setting:`task_default_queue_type` +- :setting:`worker_detect_quorum_queues` + +After upgrading to this version, please share your feedback on the Quorum Queues support. 
+ +Relevant Issues: +`#6067 `_, +`#9121 `_ + +What's Changed +~~~~~~~~~~~~~~ + +- Bump mypy from 1.13.0 to 1.14.0 (#9476) +- Fix cassandra backend port settings not working (#9465) +- Unroll group when a group with a single item is chained using the | operator (#9456) +- fix(django): catch the right error when trying to close db connection (#9392) +- Replacing a task with a chain which contains a group now returns a result instead of hanging (#9484) +- Avoid using a group of one as it is now unrolled into a chain (#9510) +- Link to the correct IRC network (#9509) +- Bump pytest-github-actions-annotate-failures from 0.2.0 to 0.3.0 (#9504) +- Update canvas.rst to fix output result from chain object (#9502) +- Unauthorized Changes Cleanup (#9528) +- [RE-APPROVED] fix(django): catch the right error when trying to close db connection (#9529) +- [RE-APPROVED] Link to the correct IRC network (#9531) +- [RE-APPROVED] Update canvas.rst to fix output result from chain object (#9532) +- Update test-ci-base.txt (#9539) +- Update install-pyenv.sh (#9540) +- Update elasticsearch requirement from <=8.17.0 to <=8.17.1 (#9518) +- Bump google-cloud-firestore from 2.19.0 to 2.20.0 (#9493) +- Bump mypy from 1.14.0 to 1.14.1 (#9483) +- Update elastic-transport requirement from <=8.15.1 to <=8.17.0 (#9490) +- Update Dockerfile by adding missing Python version 3.13 (#9549) +- Fix typo for default of sig (#9495) +- fix(crontab): resolve constructor type conflicts (#9551) +- worker_max_memory_per_child: kilobyte is 1024 bytes (#9553) +- Fix formatting in quorum queue docs (#9555) +- Bump cryptography from 44.0.0 to 44.0.1 (#9556) +- Fix the send_task method when detecting if the native delayed delivery approach is available (#9552) +- Reverted PR #7814 & minor code improvement (#9494) +- Improved donation and sponsorship visibility (#9558) +- Updated the Getting Help section, replacing deprecated with new resources (#9559) +- Fixed django example (#9562) +- Bump Kombu to v5.5.0rc3 (#9564) +- Bump ephem from 4.1.6 to 4.2 (#9565) +- Bump pytest-celery to v1.2.0 (#9568) +- Remove dependency on `pycurl` (#9526) +- Set TestWorkController.__test__ (#9574) +- Fixed bug when revoking by stamped headers a stamp that does not exist (#9575) +- Canvas Stamping Doc Fixes (#9578) +- Bugfix: Chord with a chord in header doesn't invoke error callback on inner chord header failure (default config) (#9580) +- Prepare for (pre) release: v5.5.0rc5 (#9582) + .. _version-5.5.0rc4: 5.5.0rc4 diff --git a/docs/includes/introduction.txt b/docs/includes/introduction.txt index 5bc0021d226..e6dba2738df 100644 --- a/docs/includes/introduction.txt +++ b/docs/includes/introduction.txt @@ -1,4 +1,4 @@ -:Version: 5.5.0rc4 (immunity) +:Version: 5.5.0rc5 (immunity) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ From c6037e6310d54b5f4bea05d6d6476f40974f7b85 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 27 Feb 2025 17:03:17 +0200 Subject: [PATCH 0966/1051] Bump google-cloud-firestore from 2.20.0 to 2.20.1 (#9584) Bumps [google-cloud-firestore](https://github.com/googleapis/python-firestore) from 2.20.0 to 2.20.1. 
- [Release notes](https://github.com/googleapis/python-firestore/releases) - [Changelog](https://github.com/googleapis/python-firestore/blob/main/CHANGELOG.md) - [Commits](https://github.com/googleapis/python-firestore/compare/v2.20.0...v2.20.1) --- updated-dependencies: - dependency-name: google-cloud-firestore dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/extras/gcs.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/gcs.txt b/requirements/extras/gcs.txt index 28ba9ac9ae9..7a724e51b15 100644 --- a/requirements/extras/gcs.txt +++ b/requirements/extras/gcs.txt @@ -1,3 +1,3 @@ google-cloud-storage>=2.10.0 -google-cloud-firestore==2.20.0 +google-cloud-firestore==2.20.1 grpcio==1.67.0 From 64d750bb113310503018c3c43a04d333fc1d2859 Mon Sep 17 00:00:00 2001 From: Colin Watson Date: Sun, 2 Mar 2025 01:05:08 +0000 Subject: [PATCH 0967/1051] Fix tests with Click 8.2 (#9590) https://github.com/pallets/click/pull/2523 introduced changes to `click.testing.Result` that broke a few unit tests in celery. Although this Click version hasn't been fully released yet, this adjusts Celery to work with both old and new versions. --- CONTRIBUTORS.txt | 1 + t/unit/app/test_preload_cli.py | 4 ++-- t/unit/bin/test_control.py | 8 ++++---- 3 files changed, 7 insertions(+), 6 deletions(-) diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 39b73c8a38a..45f961d8a07 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -303,3 +303,4 @@ Shamil Abdulaev, 2024/08/05 Nikos Atlas, 2024/08/26 Marc Bresson, 2024/09/02 Narasux, 2024/09/09 +Colin Watson, 2025/03/01 diff --git a/t/unit/app/test_preload_cli.py b/t/unit/app/test_preload_cli.py index 9932f5b88d4..cb07b7866cb 100644 --- a/t/unit/app/test_preload_cli.py +++ b/t/unit/app/test_preload_cli.py @@ -38,7 +38,7 @@ def test_preload_options(subcommand_with_params: Tuple[str, ...], isolated_cli_r catch_exceptions=False, ) - assert "No such option: --ini" in res_without_preload.stdout + assert "No such option: --ini" in res_without_preload.output assert res_without_preload.exit_code == 2 res_with_preload = isolated_cli_runner.invoke( @@ -53,4 +53,4 @@ def test_preload_options(subcommand_with_params: Tuple[str, ...], isolated_cli_r catch_exceptions=False, ) - assert res_with_preload.exit_code == 0, res_with_preload.stdout + assert res_with_preload.exit_code == 0, res_with_preload.output diff --git a/t/unit/bin/test_control.py b/t/unit/bin/test_control.py index 6d3704e9dc2..74f6e4fb1ca 100644 --- a/t/unit/bin/test_control.py +++ b/t/unit/bin/test_control.py @@ -33,8 +33,8 @@ def test_custom_remote_command(celery_cmd, custom_cmd, isolated_cli_runner: CliR [*_GLOBAL_OPTIONS, celery_cmd, *_INSPECT_OPTIONS, *custom_cmd], catch_exceptions=False, ) - assert res.exit_code == EX_UNAVAILABLE, (res, res.stdout) - assert res.stdout.strip() == 'Error: No nodes replied within time constraint' + assert res.exit_code == EX_UNAVAILABLE, (res, res.output) + assert res.output.strip() == 'Error: No nodes replied within time constraint' @pytest.mark.parametrize( @@ -54,8 +54,8 @@ def test_unrecognized_remote_command(celery_cmd, remote_cmd, isolated_cli_runner [*_GLOBAL_OPTIONS, celery_cmd, *_INSPECT_OPTIONS, remote_cmd], catch_exceptions=False, ) - assert res.exit_code == 2, (res, res.stdout) - assert f'Error: Command {remote_cmd} not recognized. 
Available {celery_cmd} commands: ' in res.stdout + assert res.exit_code == 2, (res, res.output) + assert f'Error: Command {remote_cmd} not recognized. Available {celery_cmd} commands: ' in res.output _expected_inspect_regex = ( From ad8dcec6f3953d3c7e5b0a61bb75f1f77edcf0a0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 3 Mar 2025 22:46:01 +0000 Subject: [PATCH 0968/1051] Bump cryptography from 44.0.1 to 44.0.2 Bumps [cryptography](https://github.com/pyca/cryptography) from 44.0.1 to 44.0.2. - [Changelog](https://github.com/pyca/cryptography/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pyca/cryptography/compare/44.0.1...44.0.2) --- updated-dependencies: - dependency-name: cryptography dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- requirements/extras/auth.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/auth.txt b/requirements/extras/auth.txt index 241eda13e6a..e9a03334287 100644 --- a/requirements/extras/auth.txt +++ b/requirements/extras/auth.txt @@ -1 +1 @@ -cryptography==44.0.1 +cryptography==44.0.2 From 25a954ff626c5b205105e4da8f292f1fd3979095 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 4 Mar 2025 23:01:25 +0000 Subject: [PATCH 0969/1051] Update elasticsearch requirement from <=8.17.1 to <=8.17.2 Updates the requirements on [elasticsearch](https://github.com/elastic/elasticsearch-py) to permit the latest version. - [Release notes](https://github.com/elastic/elasticsearch-py/releases) - [Commits](https://github.com/elastic/elasticsearch-py/compare/0.4.1...v8.17.2) --- updated-dependencies: - dependency-name: elasticsearch dependency-type: direct:production ... Signed-off-by: dependabot[bot] --- requirements/extras/elasticsearch.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/elasticsearch.txt b/requirements/extras/elasticsearch.txt index 80d47852d1e..ea674db9915 100644 --- a/requirements/extras/elasticsearch.txt +++ b/requirements/extras/elasticsearch.txt @@ -1,2 +1,2 @@ -elasticsearch<=8.17.1 +elasticsearch<=8.17.2 elastic-transport<=8.17.0 From 980cdae7f5424200ad73c1d27304d80805acf9ee Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 6 Mar 2025 04:25:34 +0200 Subject: [PATCH 0970/1051] Bump pytest from 8.3.4 to 8.3.5 (#9598) Bumps [pytest](https://github.com/pytest-dev/pytest) from 8.3.4 to 8.3.5. - [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/8.3.4...8.3.5) --- updated-dependencies: - dependency-name: pytest dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/test.txt b/requirements/test.txt index c7f9a07dff1..1cb76e1c8d0 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,4 +1,4 @@ -pytest==8.3.4 +pytest==8.3.5 pytest-celery[all]>=1.2.0 pytest-rerunfailures>=14.0,<15.0; python_version >= "3.8" and python_version < "3.9" pytest-rerunfailures>=15.0; python_version >= "3.9" and python_version < "4.0" From c8de124d1ce0443ede4447b4d6d79d9b237718c8 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 6 Mar 2025 22:44:54 +0200 Subject: [PATCH 0971/1051] Refactored and Enhanced DelayedDelivery bootstep (#9599) * Refactored DelayedDelivery bootstep * Added unit tests * Added smoke tests --- celery/worker/consumer/delayed_delivery.py | 226 ++++++++++++++++-- .../test_native_delayed_delivery.py | 123 +++++++++- t/unit/worker/test_native_delayed_delivery.py | 142 +++++++++++ 3 files changed, 469 insertions(+), 22 deletions(-) diff --git a/celery/worker/consumer/delayed_delivery.py b/celery/worker/consumer/delayed_delivery.py index d6672d32f5e..d7cacd08068 100644 --- a/celery/worker/consumer/delayed_delivery.py +++ b/celery/worker/consumer/delayed_delivery.py @@ -1,5 +1,14 @@ +"""Native delayed delivery functionality for Celery workers. + +This module provides the DelayedDelivery bootstep which handles setup and configuration +of native delayed delivery functionality when using quorum queues. +""" +from typing import Optional, Set, ValuesView + +from kombu import Connection, Queue from kombu.transport.native_delayed_delivery import (bind_queue_to_native_delayed_delivery_exchange, declare_native_delayed_delivery_exchanges_and_queues) +from kombu.utils.functional import retry_over_time from celery import Celery, bootsteps from celery.utils.log import get_logger @@ -11,27 +20,216 @@ logger = get_logger(__name__) +# Default retry settings +RETRY_INTERVAL = 1.0 # seconds between retries +MAX_RETRIES = 3 # maximum number of retries + + +# Valid queue types for delayed delivery +VALID_QUEUE_TYPES = {'classic', 'quorum'} + + class DelayedDelivery(bootsteps.StartStopStep): - """This bootstep declares native delayed delivery queues and exchanges and binds all queues to them""" + """Bootstep that sets up native delayed delivery functionality. + + This component handles the setup and configuration of native delayed delivery + for Celery workers. It is automatically included when quorum queues are + detected in the application configuration. + + Responsibilities: + - Declaring native delayed delivery exchanges and queues + - Binding all application queues to the delayed delivery exchanges + - Handling connection failures gracefully with retries + - Validating configuration settings + """ + requires = (Tasks,) - def include_if(self, c): + def include_if(self, c: Consumer) -> bool: + """Determine if this bootstep should be included. + + Args: + c: The Celery consumer instance + + Returns: + bool: True if quorum queues are detected, False otherwise + """ return detect_quorum_queues(c.app, c.app.connection_for_write().transport.driver_type)[0] - def start(self, c: Consumer): + def start(self, c: Consumer) -> None: + """Initialize delayed delivery for all broker URLs. + + Attempts to set up delayed delivery for each broker URL in the configuration. + Failures are logged but don't prevent attempting remaining URLs. 
+ + Args: + c: The Celery consumer instance + + Raises: + ValueError: If configuration validation fails + """ app: Celery = c.app - for broker_url in app.conf.broker_url.split(';'): + try: + self._validate_configuration(app) + except ValueError as e: + logger.critical("Configuration validation failed: %s", str(e)) + raise + + broker_urls = self._validate_broker_urls(app.conf.broker_url) + setup_errors = [] + + for broker_url in broker_urls: + try: + retry_over_time( + self._setup_delayed_delivery, + args=(c, broker_url), + catch=(ConnectionRefusedError, OSError), + errback=self._on_retry, + interval_start=RETRY_INTERVAL, + max_retries=MAX_RETRIES, + ) + except Exception as e: + logger.warning( + "Failed to setup delayed delivery for %r: %s", + broker_url, str(e) + ) + setup_errors.append((broker_url, e)) + + if len(setup_errors) == len(broker_urls): + logger.critical( + "Failed to setup delayed delivery for all broker URLs. " + "Native delayed delivery will not be available." + ) + + def _setup_delayed_delivery(self, c: Consumer, broker_url: str) -> None: + """Set up delayed delivery for a specific broker URL. + + Args: + c: The Celery consumer instance + broker_url: The broker URL to configure + + Raises: + ConnectionRefusedError: If connection to the broker fails + OSError: If there are network-related issues + Exception: For other unexpected errors during setup + """ + connection: Connection = c.app.connection_for_write(url=broker_url) + queue_type = c.app.conf.broker_native_delayed_delivery_queue_type + logger.debug( + "Setting up delayed delivery for broker %r with queue type %r", + broker_url, queue_type + ) + + try: + declare_native_delayed_delivery_exchanges_and_queues( + connection, + queue_type + ) + except Exception as e: + logger.warning( + "Failed to declare exchanges and queues for %r: %s", + broker_url, str(e) + ) + raise + + try: + self._bind_queues(c.app, connection) + except Exception as e: + logger.warning( + "Failed to bind queues for %r: %s", + broker_url, str(e) + ) + raise + + def _bind_queues(self, app: Celery, connection: Connection) -> None: + """Bind all application queues to delayed delivery exchanges. + + Args: + app: The Celery application instance + connection: The broker connection to use + + Raises: + Exception: If queue binding fails + """ + queues: ValuesView[Queue] = app.amqp.queues.values() + if not queues: + logger.warning("No queues found to bind for delayed delivery") + return + + for queue in queues: try: - # We use connection for write directly to avoid using ensure_connection() - connection = c.app.connection_for_write(url=broker_url) - declare_native_delayed_delivery_exchanges_and_queues( - connection, - app.conf.broker_native_delayed_delivery_queue_type + logger.debug("Binding queue %r to delayed delivery exchange", queue.name) + bind_queue_to_native_delayed_delivery_exchange(connection, queue) + except Exception as e: + logger.error( + "Failed to bind queue %r: %s", + queue.name, str(e) ) + raise + + def _on_retry(self, exc: Exception, intervals_count: int) -> None: + """Callback for retry attempts. + + Args: + exc: The exception that triggered the retry + intervals_count: Number of retry attempts so far + """ + logger.warning( + "Retrying delayed delivery setup (attempt %d/%d) after error: %s", + intervals_count + 1, MAX_RETRIES, str(exc) + ) + + def _validate_configuration(self, app: Celery) -> None: + """Validate all required configuration settings. 
+ + Args: + app: The Celery application instance + + Raises: + ValueError: If any configuration is invalid + """ + # Validate broker URLs + self._validate_broker_urls(app.conf.broker_url) + + # Validate queue type + self._validate_queue_type(app.conf.broker_native_delayed_delivery_queue_type) + + def _validate_broker_urls(self, urls: str) -> Set[str]: + """Validate and split broker URLs. + + Args: + urls: Semicolon-separated broker URLs + + Returns: + Set of valid broker URLs + + Raises: + ValueError: If no valid broker URLs are found + """ + if not urls or not urls.strip(): + raise ValueError("broker_url configuration is empty") + + valid_urls = {url.strip() for url in urls.split(';') if url.strip()} + if not valid_urls: + raise ValueError("No valid broker URLs found in configuration") + + return valid_urls + + def _validate_queue_type(self, queue_type: Optional[str]) -> None: + """Validate the queue type configuration. + + Args: + queue_type: The configured queue type + + Raises: + ValueError: If queue type is invalid + """ + if not queue_type: + raise ValueError("broker_native_delayed_delivery_queue_type is not configured") - for queue in app.amqp.queues.values(): - bind_queue_to_native_delayed_delivery_exchange(connection, queue) - except ConnectionRefusedError: - # We may receive this error if a fail-over occurs - continue + if queue_type not in VALID_QUEUE_TYPES: + sorted_types = sorted(VALID_QUEUE_TYPES) + raise ValueError( + f"Invalid queue type {queue_type!r}. Must be one of: {', '.join(sorted_types)}" + ) diff --git a/t/smoke/tests/quorum_queues/test_native_delayed_delivery.py b/t/smoke/tests/quorum_queues/test_native_delayed_delivery.py index 904b7047287..f68efaa481e 100644 --- a/t/smoke/tests/quorum_queues/test_native_delayed_delivery.py +++ b/t/smoke/tests/quorum_queues/test_native_delayed_delivery.py @@ -1,14 +1,14 @@ -from datetime import timedelta +import time +from datetime import datetime, timedelta from datetime import timezone as datetime_timezone import pytest import requests -from future.backports.datetime import datetime from pytest_celery import CeleryTestSetup from requests.auth import HTTPBasicAuth -from celery import Celery -from t.smoke.tasks import noop +from celery import Celery, chain +from t.smoke.tasks import add, noop from t.smoke.tests.quorum_queues.conftest import RabbitMQManagementBroker @@ -82,7 +82,7 @@ def test_native_delayed_delivery_queue_configuration( ): queue_configuration_test_helper(celery_setup, queues) - def test_native_delayed_delivery_exchange_configuration(self, exchanges: list, celery_setup: CeleryTestSetup): + def test_native_delayed_delivery_exchange_configuration(self, exchanges: list): exchange_configuration_test_helper(exchanges) @@ -101,12 +101,11 @@ def default_worker_app(self, default_worker_app: Celery) -> Celery: def test_native_delayed_delivery_queue_configuration( self, queues: list, - celery_setup: CeleryTestSetup, - default_worker_app: Celery + celery_setup: CeleryTestSetup ): queue_configuration_test_helper(celery_setup, queues) - def test_native_delayed_delivery_exchange_configuration(self, exchanges: list, celery_setup: CeleryTestSetup): + def test_native_delayed_delivery_exchange_configuration(self, exchanges: list): exchange_configuration_test_helper(exchanges) @@ -148,3 +147,111 @@ def test_eta_in_the_past(self, celery_setup: CeleryTestSetup): result = s.apply_async(eta=(datetime.now(datetime_timezone.utc) - timedelta(0, 5)).isoformat()) result.get(timeout=10) + + def test_long_delay(self, celery_setup: 
CeleryTestSetup, queues: list): + """Test task with a delay longer than 24 hours.""" + s = noop.s().set(queue=celery_setup.worker.worker_queue) + future_time = datetime.now(datetime_timezone.utc) + timedelta(hours=25) + result = s.apply_async(eta=future_time) + + assert result.status == "PENDING", ( + f"Task should be PENDING but was {result.status}" + ) + assert result.ready() is False, ( + "Task with future ETA should not be ready" + ) + + def test_multiple_tasks_same_eta(self, celery_setup: CeleryTestSetup): + """Test multiple tasks scheduled for the same time.""" + s = noop.s().set(queue=celery_setup.worker.worker_queue) + future_time = datetime.now(datetime_timezone.utc) + timedelta(seconds=5) + + results = [ + s.apply_async(eta=future_time) + for _ in range(5) + ] + + for result in results: + result.get(timeout=10) + assert result.status == "SUCCESS" + + def test_multiple_tasks_different_delays(self, celery_setup: CeleryTestSetup): + """Test multiple tasks with different delay times.""" + s = noop.s().set(queue=celery_setup.worker.worker_queue) + now = datetime.now(datetime_timezone.utc) + + results = [ + s.apply_async(eta=now + timedelta(seconds=delay)) + for delay in (2, 4, 6) + ] + + completion_times = [] + for result in results: + result.get(timeout=10) + completion_times.append(datetime.now(datetime_timezone.utc)) + + for i in range(1, len(completion_times)): + assert completion_times[i] > completion_times[i-1], ( + f"Task {i} completed at {completion_times[i]} which is not after " + f"task {i-1} completed at {completion_times[i-1]}" + ) + + def test_revoke_delayed_task(self, celery_setup: CeleryTestSetup): + """Test revoking a delayed task before it executes.""" + s = noop.s().set(queue=celery_setup.worker.worker_queue) + result = s.apply_async(countdown=10) + + assert result.status == "PENDING" + result.revoke() + + time.sleep(12) + assert result.status == "REVOKED" + + def test_chain_with_delays(self, celery_setup: CeleryTestSetup): + """Test chain of tasks with delays between them.""" + c = chain( + add.s(1, 2).set(countdown=2), + add.s(3).set(countdown=2), + add.s(4).set(countdown=2) + ).set(queue=celery_setup.worker.worker_queue) + + result = c() + assert result.get(timeout=15) == 10 + + def test_zero_delay(self, celery_setup: CeleryTestSetup): + """Test task with zero delay/countdown.""" + s = noop.s().set(queue=celery_setup.worker.worker_queue) + + result = s.apply_async(countdown=0) + result.get(timeout=10) + assert result.status == "SUCCESS" + + def test_negative_countdown(self, celery_setup: CeleryTestSetup): + """Test task with negative countdown (should execute immediately).""" + s = noop.s().set(queue=celery_setup.worker.worker_queue) + + result = s.apply_async(countdown=-5) + result.get(timeout=10) + assert result.status == "SUCCESS" + + def test_very_short_delay(self, celery_setup: CeleryTestSetup): + """Test task with very short delay (1 second).""" + s = noop.s().set(queue=celery_setup.worker.worker_queue) + + result = s.apply_async(countdown=1) + result.get(timeout=10) + assert result.status == "SUCCESS" + + def test_concurrent_delayed_tasks(self, celery_setup: CeleryTestSetup): + """Test many concurrent delayed tasks.""" + s = noop.s().set(queue=celery_setup.worker.worker_queue) + future_time = datetime.now(datetime_timezone.utc) + timedelta(seconds=2) + + results = [ + s.apply_async(eta=future_time) + for _ in range(100) + ] + + for result in results: + result.get(timeout=10) + assert result.status == "SUCCESS" diff --git 
a/t/unit/worker/test_native_delayed_delivery.py b/t/unit/worker/test_native_delayed_delivery.py index 2170869d7ef..fecdb514fa9 100644 --- a/t/unit/worker/test_native_delayed_delivery.py +++ b/t/unit/worker/test_native_delayed_delivery.py @@ -1,6 +1,7 @@ from logging import LogRecord from unittest.mock import Mock, patch +import pytest from kombu import Exchange, Queue from celery.worker.consumer.delayed_delivery import DelayedDelivery @@ -72,3 +73,144 @@ def test_start_native_delayed_delivery_fanout_exchange(self, caplog): delayed_delivery.start(consumer_mock) assert len(caplog.records) == 0 + + def test_validate_broker_urls_empty(self): + delayed_delivery = DelayedDelivery(Mock()) + + with pytest.raises(ValueError, match="broker_url configuration is empty"): + delayed_delivery._validate_broker_urls("") + + with pytest.raises(ValueError, match="broker_url configuration is empty"): + delayed_delivery._validate_broker_urls(None) + + def test_validate_broker_urls_invalid(self): + delayed_delivery = DelayedDelivery(Mock()) + + with pytest.raises(ValueError, match="No valid broker URLs found in configuration"): + delayed_delivery._validate_broker_urls(" ; ; ") + + def test_validate_broker_urls_valid(self): + delayed_delivery = DelayedDelivery(Mock()) + + urls = delayed_delivery._validate_broker_urls("amqp://localhost;amqp://remote") + assert urls == {"amqp://localhost", "amqp://remote"} + + def test_validate_queue_type_empty(self): + delayed_delivery = DelayedDelivery(Mock()) + + with pytest.raises(ValueError, match="broker_native_delayed_delivery_queue_type is not configured"): + delayed_delivery._validate_queue_type(None) + + with pytest.raises(ValueError, match="broker_native_delayed_delivery_queue_type is not configured"): + delayed_delivery._validate_queue_type("") + + def test_validate_queue_type_invalid(self): + delayed_delivery = DelayedDelivery(Mock()) + + with pytest.raises(ValueError, match="Invalid queue type 'invalid'. 
Must be one of: classic, quorum"): + delayed_delivery._validate_queue_type("invalid") + + def test_validate_queue_type_valid(self): + delayed_delivery = DelayedDelivery(Mock()) + + delayed_delivery._validate_queue_type("classic") + delayed_delivery._validate_queue_type("quorum") + + @patch('celery.worker.consumer.delayed_delivery.retry_over_time') + def test_start_retry_on_connection_error(self, mock_retry, caplog): + consumer_mock = Mock() + consumer_mock.app.conf.broker_native_delayed_delivery_queue_type = 'classic' + consumer_mock.app.conf.broker_url = 'amqp://localhost;amqp://backup' + consumer_mock.app.amqp.queues = { + 'celery': Queue('celery', exchange=Exchange('celery', type='topic')) + } + + mock_retry.side_effect = ConnectionRefusedError("Connection refused") + + delayed_delivery = DelayedDelivery(consumer_mock) + delayed_delivery.start(consumer_mock) + + # Should try both URLs + assert mock_retry.call_count == 2 + # Should log warning for each failed attempt + assert len([r for r in caplog.records if r.levelname == "WARNING"]) == 2 + # Should log critical when all URLs fail + assert len([r for r in caplog.records if r.levelname == "CRITICAL"]) == 1 + + def test_on_retry_logging(self, caplog): + delayed_delivery = DelayedDelivery(Mock()) + exc = ConnectionRefusedError("Connection refused") + + delayed_delivery._on_retry(exc, 1) + + assert len(caplog.records) == 1 + record = caplog.records[0] + assert record.levelname == "WARNING" + assert "attempt 2/3" in record.message + assert "Connection refused" in record.message + + def test_start_with_no_queues(self, caplog): + consumer_mock = Mock() + consumer_mock.app.conf.broker_native_delayed_delivery_queue_type = 'classic' + consumer_mock.app.conf.broker_url = 'amqp://' + consumer_mock.app.amqp.queues = {} + + delayed_delivery = DelayedDelivery(consumer_mock) + delayed_delivery.start(consumer_mock) + + assert len([r for r in caplog.records if r.levelname == "WARNING"]) == 1 + assert "No queues found to bind for delayed delivery" in caplog.records[0].message + + def test_start_configuration_validation_error(self, caplog): + consumer_mock = Mock() + consumer_mock.app.conf.broker_url = "" # Invalid broker URL + + delayed_delivery = DelayedDelivery(consumer_mock) + + with pytest.raises(ValueError, match="broker_url configuration is empty"): + delayed_delivery.start(consumer_mock) + + assert len(caplog.records) == 1 + record = caplog.records[0] + assert record.levelname == "CRITICAL" + assert "Configuration validation failed" in record.message + + @patch('celery.worker.consumer.delayed_delivery.declare_native_delayed_delivery_exchanges_and_queues') + def test_setup_declare_error(self, mock_declare, caplog): + consumer_mock = Mock() + consumer_mock.app.conf.broker_native_delayed_delivery_queue_type = 'classic' + consumer_mock.app.conf.broker_url = 'amqp://' + consumer_mock.app.amqp.queues = { + 'celery': Queue('celery', exchange=Exchange('celery', type='topic')) + } + + mock_declare.side_effect = Exception("Failed to declare") + + delayed_delivery = DelayedDelivery(consumer_mock) + delayed_delivery.start(consumer_mock) + + # Should log warning and critical messages + assert len([r for r in caplog.records if r.levelname == "WARNING"]) == 2 + assert len([r for r in caplog.records if r.levelname == "CRITICAL"]) == 1 + assert any("Failed to declare exchanges and queues" in r.message for r in caplog.records) + assert any("Failed to setup delayed delivery for all broker URLs" in r.message for r in caplog.records) + + 
@patch('celery.worker.consumer.delayed_delivery.bind_queue_to_native_delayed_delivery_exchange')
+    def test_setup_bind_error(self, mock_bind, caplog):
+        consumer_mock = Mock()
+        consumer_mock.app.conf.broker_native_delayed_delivery_queue_type = 'classic'
+        consumer_mock.app.conf.broker_url = 'amqp://'
+        consumer_mock.app.amqp.queues = {
+            'celery': Queue('celery', exchange=Exchange('celery', type='topic'))
+        }
+
+        mock_bind.side_effect = Exception("Failed to bind")
+
+        delayed_delivery = DelayedDelivery(consumer_mock)
+        delayed_delivery.start(consumer_mock)
+
+        # Should log warning and critical messages
+        assert len([r for r in caplog.records if r.levelname == "WARNING"]) == 2
+        assert len([r for r in caplog.records if r.levelname == "CRITICAL"]) == 1
+        assert any("Failed to bind queue" in r.message for r in caplog.records)
+        assert any("Failed to setup delayed delivery for all broker URLs" in r.message for r in caplog.records)

From 0316198dc2511e34028b37a67607c067151e7c91 Mon Sep 17 00:00:00 2001
From: Yaroslav Pekatoros <115944825+ya-pekatoros@users.noreply.github.com>
Date: Wed, 12 Mar 2025 11:27:31 +0300
Subject: [PATCH 0972/1051] Improve docs about acks_on_failure_or_timeout
 (#9577)

* Clarify docs about infinite max_retries for tasks

* Clarification for actual default for acks_on_failure_or_timeout

---------

Co-authored-by: Yaroslav Pekatoros
Co-authored-by: Asif Saif Uddin
---
 celery/app/task.py | 4 ++--
 docs/userguide/configuration.rst | 4 ++--
 docs/userguide/tasks.rst | 11 +++++++++++
 3 files changed, 15 insertions(+), 4 deletions(-)

diff --git a/celery/app/task.py b/celery/app/task.py
index 90ba8552d4f..60434992dc3 100644
--- a/celery/app/task.py
+++ b/celery/app/task.py
@@ -693,8 +693,8 @@ def retry(self, args=None, kwargs=None, exc=None, throw=True,
                 this execution. Changes to this parameter don't propagate to
                 subsequent task retry attempts. A value of :const:`None`,
                 means "use the default", so if you want infinite retries you'd
-                have to set the :attr:`max_retries` attribute of the task to
-                :const:`None` first.
+                have to set the :attr:`max_retries` attribute of the task class to
+                :const:`None`.
             time_limit (int): If set, overrides the default time limit.
             soft_time_limit (int): If set, overrides the default soft
                 time limit.
diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst
index 56521e0400c..1745cd13842 100644
--- a/docs/userguide/configuration.rst
+++ b/docs/userguide/configuration.rst
@@ -610,12 +610,12 @@ has been executed, not *right before* (the default behavior).
 ``task_acks_on_failure_or_timeout``
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-Default: Enabled
+Default: Disabled
 
 When enabled messages for all tasks will be acknowledged even if they
 fail or time out.
 
-Configuring this setting only applies to tasks that are
+This setting only applies to tasks that are
 acknowledged **after** they have been executed and only if
 :setting:`task_acks_late` is enabled.
 
diff --git a/docs/userguide/tasks.rst b/docs/userguide/tasks.rst
index 60c5e89f259..4bc0c6f6701 100644
--- a/docs/userguide/tasks.rst
+++ b/docs/userguide/tasks.rst
@@ -734,6 +734,17 @@ avoid having all the tasks run at the same moment. It will also cap the
 maximum backoff delay to 10 minutes. All these settings can be customized
 via options documented below.
 
+Retry forever
+------------------------------------
+
+If you want to retry a task forever, you should set the `max_retries` attribute of the task to ``None``:
+
+.. code-block:: python
+
+    @app.task(max_retries=None)
+    def x():
+        ...
+
 ..
versionadded:: 4.4 You can also set `autoretry_for`, `max_retries`, `retry_backoff`, `retry_backoff_max` and `retry_jitter` options in class-based tasks: From 3639a8ca3879c557f65c63dc8a1498b5b56ea5a5 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 13 Mar 2025 01:18:59 +0200 Subject: [PATCH 0973/1051] Update SECURITY.md (#9609) Updated version and relevant maintainers --- SECURITY.md | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/SECURITY.md b/SECURITY.md index 61902e2c492..0f4cb505170 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -2,16 +2,14 @@ ## Supported Versions -Use this section to tell people about which versions of your project are -currently being supported with security updates. - | Version | Supported | | ------- | ------------------ | -| 5.3.x | :white_check_mark: | +| 5.4.x | :white_check_mark: | +| 5.3.x | :x: | | 5.2.x | :x: | | 5.1.x | :x: | | < 5.0 | :x: | ## Reporting a Vulnerability -Please reach out to auvipy@gmail.com & omer.drow@gmail.com for reporting security concerns via email. +Please reach out to tomer.nosrati@gmail.com or auvipy@gmail.com for reporting security concerns via email. From a641554a5b243d812b18d5ffdcc5eeae6de88726 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Thu, 13 Mar 2025 13:38:42 +0600 Subject: [PATCH 0974/1051] remove flake8plus as not needed anymore (#9610) --- Makefile | 11 +---------- requirements/pkgutils.txt | 1 - 2 files changed, 1 insertion(+), 11 deletions(-) diff --git a/Makefile b/Makefile index f333376ad1c..d28ac57dcf7 100644 --- a/Makefile +++ b/Makefile @@ -7,7 +7,6 @@ TOX=tox ICONV=iconv FLAKE8=flake8 PYROMA=pyroma -FLAKEPLUS=flakeplus SPHINX2RST=sphinx2rst RST2HTML=rst2html.py DEVNULL=/dev/null @@ -22,7 +21,6 @@ CONTRIBUTING=CONTRIBUTING.rst CONTRIBUTING_SRC="docs/contributing.rst" SPHINX_HTMLDIR="${SPHINX_BUILDDIR}/html" DOCUMENTATION=Documentation -FLAKEPLUSTARGET=2.7 WORKER_GRAPH="docs/images/worker_graph_full.png" @@ -40,7 +38,6 @@ help: @echo " contribcheck - Check CONTRIBUTING.rst encoding" @echo " flakes -------- - Check code for syntax and style errors." @echo " flakecheck - Run flake8 on the source code." - @echo " flakepluscheck - Run flakeplus on the source code." @echo "readme - Regenerate README.rst file." @echo "contrib - Regenerate CONTRIBUTING.rst file" @echo "clean-dist --------- - Clean all distribution build artifacts." 
@@ -100,13 +97,7 @@ flakecheck:
 flakediag:
 	-$(MAKE) flakecheck
 
-flakepluscheck:
-	$(FLAKEPLUS) --$(FLAKEPLUSTARGET) "$(PROJ)" "$(TESTDIR)"
-
-flakeplusdiag:
-	-$(MAKE) flakepluscheck
-
-flakes: flakediag flakeplusdiag
+flakes: flakediag
 
 clean-readme:
 	-rm -f $(README)
diff --git a/requirements/pkgutils.txt b/requirements/pkgutils.txt
index fd180f53be3..eefe5d34af0 100644
--- a/requirements/pkgutils.txt
+++ b/requirements/pkgutils.txt
@@ -1,7 +1,6 @@
 setuptools>=40.8.0
 wheel>=0.33.1
 flake8>=3.8.3
-flakeplus>=1.1
 flake8-docstrings>=1.7.0
 pydocstyle==6.3.0
 tox>=3.8.4

From e78816bf8d6a5009aaa2431e296712c5ca25501e Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Thu, 13 Mar 2025 14:08:53 +0600
Subject: [PATCH 0975/1051] remove [bdist_wheel] universal = 0 from setup.cfg
 as not needed (#9611)

This is no longer needed in a Python 3-only setup.
---
 setup.cfg | 2 --
 1 file changed, 2 deletions(-)

diff --git a/setup.cfg b/setup.cfg
index 1d66df8b7a2..a74a438d952 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -37,8 +37,6 @@ requires = backports.zoneinfo>=0.2.1;python_version<'3.9'
            billiard >=4.1.0,<5.0
            kombu >= 5.3.4,<6.0.0
 
-[bdist_wheel]
-universal = 0
 
 [metadata]
 license_files = LICENSE

From eb5a700696ebdd366584c21fb1feedf503483089 Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Thu, 13 Mar 2025 19:13:58 +0600
Subject: [PATCH 0976/1051] remove importlib-metadata as not needed in
 python3.8 anymore (#9612)

---
 requirements/default.txt | 1 -
 1 file changed, 1 deletion(-)

diff --git a/requirements/default.txt b/requirements/default.txt
index dcec525e00a..bd93b018735 100644
--- a/requirements/default.txt
+++ b/requirements/default.txt
@@ -5,6 +5,5 @@ click>=8.1.2,<9.0
 click-didyoumean>=0.3.0
 click-repl>=0.2.0
 click-plugins>=1.1.1
-importlib-metadata>=3.6; python_version < '3.8'
 backports.zoneinfo[tzdata]>=0.2.1; python_version < '3.9'
 python-dateutil>=2.8.2

From 301dda12eac95f56228111869d9fc156b85a8942 Mon Sep 17 00:00:00 2001
From: Peter Drienko
Date: Thu, 13 Mar 2025 17:53:30 +0100
Subject: [PATCH 0977/1051] feat: define exception_safe_to_retry for
 redisbackend (#9614)

---
 celery/backends/redis.py | 5 +++++
 t/unit/backends/test_redis.py | 13 +++++++++++++
 2 files changed, 18 insertions(+)

diff --git a/celery/backends/redis.py b/celery/backends/redis.py
index 8acc60831bf..3e3ef737f95 100644
--- a/celery/backends/redis.py
+++ b/celery/backends/redis.py
@@ -359,6 +359,11 @@ def _params_from_url(self, url, defaults):
         connparams.update(query)
         return connparams
 
+    def exception_safe_to_retry(self, exc):
+        if isinstance(exc, self.connection_errors):
+            return True
+        return False
+
     @cached_property
     def retry_policy(self):
         retry_policy = super().retry_policy
diff --git a/t/unit/backends/test_redis.py b/t/unit/backends/test_redis.py
index 876d747dde3..314327ef174 100644
--- a/t/unit/backends/test_redis.py
+++ b/t/unit/backends/test_redis.py
@@ -9,6 +9,11 @@
 
 import pytest
 
+try:
+    from redis import exceptions
+except ImportError:
+    exceptions = None
+
 from celery import signature, states, uuid
 from celery.canvas import Signature
 from celery.contrib.testing.mocks import ContextMock
@@ -694,6 +699,14 @@ def fn():
             max_retries=2, interval_start=0, interval_step=0.01, interval_max=1
         )
 
+    def test_exception_safe_to_retry(self):
+        b = self.Backend(app=self.app)
+        assert not b.exception_safe_to_retry(Exception("failed"))
+        assert not b.exception_safe_to_retry(BaseException("failed"))
+        assert not b.exception_safe_to_retry(exceptions.RedisError("redis error"))
+        assert
b.exception_safe_to_retry(exceptions.ConnectionError("service unavailable")) + assert b.exception_safe_to_retry(exceptions.TimeoutError("timeout")) + def test_incr(self): self.b.client = Mock(name='client') self.b.incr('foo') From b41d718054dd2856ff83f2b9e912082f0b22f317 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 13 Mar 2025 20:38:57 +0200 Subject: [PATCH 0978/1051] Bump Kombu to v5.5.0 (#9615) --- requirements/default.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/default.txt b/requirements/default.txt index bd93b018735..64a26280af6 100644 --- a/requirements/default.txt +++ b/requirements/default.txt @@ -1,5 +1,5 @@ billiard>=4.2.1,<5.0 -kombu>=5.5.0rc3,<6.0 +kombu>=5.5.0,<6.0 vine>=5.1.0,<6.0 click>=8.1.2,<9.0 click-didyoumean>=0.3.0 From ba44ac44d0bb725fbff277f8394cdcd56d41d040 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 13 Mar 2025 22:55:33 +0000 Subject: [PATCH 0979/1051] Update elastic-transport requirement from <=8.17.0 to <=8.17.1 Updates the requirements on [elastic-transport](https://github.com/elastic/elastic-transport-python) to permit the latest version. - [Release notes](https://github.com/elastic/elastic-transport-python/releases) - [Changelog](https://github.com/elastic/elastic-transport-python/blob/main/CHANGELOG.md) - [Commits](https://github.com/elastic/elastic-transport-python/compare/0.1.0b0...v8.17.1) --- updated-dependencies: - dependency-name: elastic-transport dependency-type: direct:production ... Signed-off-by: dependabot[bot] --- requirements/extras/elasticsearch.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/extras/elasticsearch.txt b/requirements/extras/elasticsearch.txt index ea674db9915..58cdcae1836 100644 --- a/requirements/extras/elasticsearch.txt +++ b/requirements/extras/elasticsearch.txt @@ -1,2 +1,2 @@ elasticsearch<=8.17.2 -elastic-transport<=8.17.0 +elastic-transport<=8.17.1 From d669c16521777d9db37e643793b95c505bcd386f Mon Sep 17 00:00:00 2001 From: Isidro Date: Sun, 16 Mar 2025 01:43:30 +0100 Subject: [PATCH 0980/1051] Worker must restart after a backend change (#9618) Otherwise, the result.get() would time out --- docs/getting-started/first-steps-with-celery.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/getting-started/first-steps-with-celery.rst b/docs/getting-started/first-steps-with-celery.rst index 2637851d3a3..88d9b0b0af6 100644 --- a/docs/getting-started/first-steps-with-celery.rst +++ b/docs/getting-started/first-steps-with-celery.rst @@ -245,7 +245,7 @@ the message broker (a popular combination): To read more about result backends please see :ref:`task-result-backends`. -Now with the result backend configured, close the current python session and import the +Now with the result backend configured, restart the worker, close the current python session and import the ``tasks`` module again to put the changes into effect. This time you'll hold on to the :class:`~@AsyncResult` instance returned when you call a task: From 78bd187e756ad22c38dfe44fd45d9e009c40e045 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Mon, 17 Mar 2025 05:11:07 +0600 Subject: [PATCH 0981/1051] Revert "Improve docs about acks_on_failure_or_timeout (#9577)" (#9606) This reverts commit 0316198dc2511e34028b37a67607c067151e7c91. 
---
 celery/app/task.py | 4 ++--
 docs/userguide/configuration.rst | 4 ++--
 docs/userguide/tasks.rst | 11 -----------
 3 files changed, 4 insertions(+), 15 deletions(-)

diff --git a/celery/app/task.py b/celery/app/task.py
index 60434992dc3..90ba8552d4f 100644
--- a/celery/app/task.py
+++ b/celery/app/task.py
@@ -693,8 +693,8 @@ def retry(self, args=None, kwargs=None, exc=None, throw=True,
                 this execution. Changes to this parameter don't propagate to
                 subsequent task retry attempts. A value of :const:`None`,
                 means "use the default", so if you want infinite retries you'd
-                have to set the :attr:`max_retries` attribute of the task class to
-                :const:`None`.
+                have to set the :attr:`max_retries` attribute of the task to
+                :const:`None` first.
             time_limit (int): If set, overrides the default time limit.
             soft_time_limit (int): If set, overrides the default soft
                 time limit.
diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst
index 1745cd13842..56521e0400c 100644
--- a/docs/userguide/configuration.rst
+++ b/docs/userguide/configuration.rst
@@ -610,12 +610,12 @@ has been executed, not *right before* (the default behavior).
 ``task_acks_on_failure_or_timeout``
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-Default: Disabled
+Default: Enabled
 
 When enabled messages for all tasks will be acknowledged even if they
 fail or time out.
 
-This setting only applies to tasks that are
+Configuring this setting only applies to tasks that are
 acknowledged **after** they have been executed and only if
 :setting:`task_acks_late` is enabled.
 
diff --git a/docs/userguide/tasks.rst b/docs/userguide/tasks.rst
index 4bc0c6f6701..60c5e89f259 100644
--- a/docs/userguide/tasks.rst
+++ b/docs/userguide/tasks.rst
@@ -734,17 +734,6 @@ avoid having all the tasks run at the same moment. It will also cap the
 maximum backoff delay to 10 minutes. All these settings can be customized
 via options documented below.
 
-Retry forever
-------------------------------------
-
-If you want to retry a task forever, you should set the `max_retries` attribute of the task to ``None``:
-
-.. code-block:: python
-
-    @app.task(max_retries=None)
-    def x():
-        ...
-
 .. versionadded:: 4.4

 You can also set `autoretry_for`, `max_retries`, `retry_backoff`, `retry_backoff_max` and `retry_jitter` options in class-based tasks:

From 2ca5507fc59dd0269f9ee9ec7d2606dba5252e80 Mon Sep 17 00:00:00 2001
From: Tomer Nosrati
Date: Tue, 18 Mar 2025 07:29:38 +0200
Subject: [PATCH 0982/1051] Improve CI stability and performance (#9624)

* Added back Python 3.9-3.11 to Smoke Tests

* Reduced smoke tests timeout from 1h to 20 minutes

* Refactored Smoke Tests CI

* [TMP] removed unit and integration tests

* Added retry mechanism to the smoke tests CI via GitHub Actions

* Revert "[TMP] removed unit and integration tests"

This reverts commit 27a80ed59a49537a4ded8263323a9843ff2db2af.
* Added retry mechanism to the integration tests CI via GitHub Actions * Added @flaky to `test_revoke_by_stamped_headers_no_match` --- .github/workflows/python-package.yml | 34 +++++++++++++++++++--------- t/integration/test_tasks.py | 1 + 2 files changed, 24 insertions(+), 11 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 794788269fd..7269abe857c 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -124,10 +124,14 @@ jobs: - name: > Run tox for "${{ matrix.python-version }}-integration-${{ matrix.toxenv }}" - timeout-minutes: 60 - run: > - tox --verbose --verbose -e - "${{ matrix.python-version }}-integration-${{ matrix.toxenv }}" -vv + uses: nick-fields/retry@v3 + with: + timeout_minutes: 60 + max_attempts: 2 + retry_wait_seconds: 0 + command: | + tox --verbose --verbose -e "${{ matrix.python-version }}-integration-${{ matrix.toxenv }}" -vv + Smoke: needs: - Unit @@ -136,11 +140,15 @@ jobs: strategy: fail-fast: false matrix: - python-version: ['3.8', '3.12', '3.13'] + python-version: ['3.8', '3.9', '3.10', '3.11', '3.12', '3.13'] test-case: [ - 'failover', - 'quorum_queues', - 'stamping', + 'test_broker_failover.py', + 'test_worker_failover.py', + 'test_native_delayed_delivery.py', + 'test_quorum_queues.py', + 'test_hybrid_cluster.py', + 'test_revoke.py', + 'test_visitor.py', 'test_canvas.py', 'test_consumer.py', 'test_control.py', @@ -175,6 +183,10 @@ jobs: run: python -m pip install --upgrade pip tox tox-gh-actions - name: Run tox for "${{ matrix.python-version }}-smoke-${{ matrix.test-case }}" - timeout-minutes: 60 - run: | - tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k ${{ matrix.test-case }} + uses: nick-fields/retry@v3 + with: + timeout_minutes: 20 + max_attempts: 2 + retry_wait_seconds: 0 + command: | + tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k ${{ matrix.test-case }} diff --git a/t/integration/test_tasks.py b/t/integration/test_tasks.py index 1b175a01320..4b0839309a8 100644 --- a/t/integration/test_tasks.py +++ b/t/integration/test_tasks.py @@ -318,6 +318,7 @@ def on_signature(self, sig, **headers) -> dict: assert result.successful() is False worker_state.revoked_stamps.clear() + @flaky def test_revoke_by_stamped_headers_no_match(self, manager): response = manager.app.control.revoke_by_stamped_headers( {"myheader": ["myvalue"]}, From 10cecefc0b9a504f030f389cdb807f21ad60898a Mon Sep 17 00:00:00 2001 From: Soham Date: Tue, 18 Mar 2025 11:09:51 +0530 Subject: [PATCH 0983/1051] Improved explanation for Database transactions at user guide for tasks (#9617) * Improved explanation for Database transactions at user guide for tasks * Update docs/userguide/tasks.rst --------- Co-authored-by: Asif Saif Uddin --- docs/userguide/tasks.rst | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/docs/userguide/tasks.rst b/docs/userguide/tasks.rst index 60c5e89f259..6d5d605dca6 100644 --- a/docs/userguide/tasks.rst +++ b/docs/userguide/tasks.rst @@ -2039,9 +2039,7 @@ then passing the primary key to a task. It uses the `transaction.atomic` decorator, that will commit the transaction when the view returns, or roll back if the view raises an exception. -There's a race condition if the task starts executing -before the transaction has been committed; The database object doesn't exist -yet! +There is a race condition because transactions are atomic. 
This means the article object is not persisted to the database until after the view function returns a response. If the asynchronous task starts executing before the transaction is committed, it may attempt to query the article object before it exists. To prevent this, we need to ensure that the transaction is committed before triggering the task. The solution is to use :meth:`~celery.contrib.django.task.DjangoTask.delay_on_commit` instead: From 9226cb476f8755e6e199225ce41d03c66f00426e Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Wed, 19 Mar 2025 14:33:59 +0600 Subject: [PATCH 0984/1051] update tests to use python 3.8 codes only (#9627) --- t/unit/tasks/test_canvas.py | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/t/unit/tasks/test_canvas.py b/t/unit/tasks/test_canvas.py index 6d287848c31..d4ed5e39afd 100644 --- a/t/unit/tasks/test_canvas.py +++ b/t/unit/tasks/test_canvas.py @@ -90,15 +90,7 @@ def replace_with_chain(self, x, y): @self.app.task(shared=False) def xprod(numbers): - try: - return math.prod(numbers) - except AttributeError: - # TODO: Drop this backport once - # we drop support for Python 3.7 - import operator - from functools import reduce - - return reduce(operator.mul, numbers) + return math.prod(numbers) self.xprod = xprod From 53dc515c77efd1fb19aad599b05ad96d886e5327 Mon Sep 17 00:00:00 2001 From: Jackson Kontny Date: Thu, 20 Mar 2025 02:30:09 -0500 Subject: [PATCH 0985/1051] Reject task when hard time limit is exceeded with acks_on_failure_or_timeout=False (#9626) Co-authored-by: Jackson Kontny Co-authored-by: Asif Saif Uddin --- celery/worker/request.py | 4 ++-- t/unit/worker/test_request.py | 42 ++++++++++++++++++++++++++++++++++- 2 files changed, 43 insertions(+), 3 deletions(-) diff --git a/celery/worker/request.py b/celery/worker/request.py index 1e337b84fc5..df99b549270 100644 --- a/celery/worker/request.py +++ b/celery/worker/request.py @@ -602,8 +602,8 @@ def on_failure(self, exc_info, send_failed_event=True, return_ok=False): is_worker_lost = isinstance(exc, WorkerLostError) if self.task.acks_late: reject = ( - self.task.reject_on_worker_lost and - is_worker_lost + (self.task.reject_on_worker_lost and is_worker_lost) + or (isinstance(exc, TimeLimitExceeded) and not self.task.acks_on_failure_or_timeout) ) ack = self.task.acks_on_failure_or_timeout if reject: diff --git a/t/unit/worker/test_request.py b/t/unit/worker/test_request.py index 44408599dc7..172ca5162ac 100644 --- a/t/unit/worker/test_request.py +++ b/t/unit/worker/test_request.py @@ -15,7 +15,8 @@ from celery.app.trace import (TraceInfo, build_tracer, fast_trace_task, mro_lookup, reset_worker_optimizations, setup_worker_optimizations, trace_task, trace_task_ret) from celery.backends.base import BaseDictBackend -from celery.exceptions import Ignore, InvalidTaskError, Reject, Retry, TaskRevokedError, Terminated, WorkerLostError +from celery.exceptions import (Ignore, InvalidTaskError, Reject, Retry, TaskRevokedError, Terminated, + TimeLimitExceeded, WorkerLostError) from celery.signals import task_failure, task_retry, task_revoked from celery.worker import request as module from celery.worker import strategy @@ -398,6 +399,45 @@ def test_on_failure_WorkerLostError_redelivered_True(self): request=req._context, store_result=True) + def test_on_failure_TimeLimitExceeded_acks(self): + try: + raise TimeLimitExceeded() + except TimeLimitExceeded: + einfo = ExceptionInfo(internal=True) + + req = self.get_request(self.add.s(2, 2)) + req.task.acks_late = True + 
req.task.acks_on_failure_or_timeout = True + req.delivery_info['redelivered'] = False + req.task.backend = Mock() + + req.on_failure(einfo) + + req.on_ack.assert_called_with( + req_logger, req.connection_errors) + req.task.backend.mark_as_failure.assert_called_once_with(req.id, + einfo.exception.exc, + request=req._context, + store_result=True) + + def test_on_failure_TimeLimitExceeded_rejects_with_requeue(self): + try: + raise TimeLimitExceeded() + except TimeLimitExceeded: + einfo = ExceptionInfo(internal=True) + + req = self.get_request(self.add.s(2, 2)) + req.task.acks_late = True + req.task.acks_on_failure_or_timeout = False + req.delivery_info['redelivered'] = False + req.task.backend = Mock() + + req.on_failure(einfo) + + req.on_reject.assert_called_with( + req_logger, req.connection_errors, True) + req.task.backend.mark_as_failure.assert_not_called() + def test_tzlocal_is_cached(self): req = self.get_request(self.add.s(2, 2)) req._tzlocal = 'foo' From 843b6f2c1c139bbef2348ea17241436281e07b84 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 25 Mar 2025 00:15:59 +0200 Subject: [PATCH 0986/1051] Lock Kombu to v5.5.x (using urllib3 instead of pycurl) (#9632) --- requirements/default.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/default.txt b/requirements/default.txt index 64a26280af6..c456feee5cd 100644 --- a/requirements/default.txt +++ b/requirements/default.txt @@ -1,5 +1,5 @@ billiard>=4.2.1,<5.0 -kombu>=5.5.0,<6.0 +kombu>=5.5.0,<5.6 vine>=5.1.0,<6.0 click>=8.1.2,<9.0 click-didyoumean>=0.3.0 From 0436c551b8bdfb83551de03136171e8f67cab04a Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 25 Mar 2025 00:16:57 +0200 Subject: [PATCH 0987/1051] Lock pytest-celery to v1.2.x (using urllib3 instead of pycurl) (#9633) --- requirements/extras/pytest.txt | 2 +- requirements/test.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/extras/pytest.txt b/requirements/extras/pytest.txt index 01972fb128f..01fe3ab8c5e 100644 --- a/requirements/extras/pytest.txt +++ b/requirements/extras/pytest.txt @@ -1 +1 @@ -pytest-celery[all]>=1.2.0 +pytest-celery[all]>=1.2.0,<1.3.0 diff --git a/requirements/test.txt b/requirements/test.txt index 1cb76e1c8d0..527d975f617 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,5 +1,5 @@ pytest==8.3.5 -pytest-celery[all]>=1.2.0 +pytest-celery[all]>=1.2.0,<1.3.0 pytest-rerunfailures>=14.0,<15.0; python_version >= "3.8" and python_version < "3.9" pytest-rerunfailures>=15.0; python_version >= "3.9" and python_version < "4.0" pytest-subtests<0.14.0; python_version < "3.9" From 4e2faced5252ef77cf720b92be9789498406c34f Mon Sep 17 00:00:00 2001 From: Jerry Feng Date: Tue, 25 Mar 2025 13:22:46 -0400 Subject: [PATCH 0988/1051] Add Codecov Test Analytics --- .github/workflows/python-package.yml | 6 ++++++ tox.ini | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 7269abe857c..f503f78bb33 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -77,6 +77,12 @@ jobs: token: ${{ secrets.CODECOV_TOKEN }} verbose: true # optional (default = false) + - name: Upload test results to Codecov + if: ${{ !cancelled() }} + uses: codecov/test-results-action@v1 + with: + token: ${{ secrets.CODECOV_TOKEN }} + Integration: needs: - Unit diff --git a/tox.ini b/tox.ini index 55f80bd167d..2b5fdfcfb57 100644 --- a/tox.ini +++ b/tox.ini @@ -44,7 +44,7 @@ deps= bandit: 
bandit commands = - unit: pytest -vv --maxfail=10 --capture=no -v --cov=celery --cov-report=xml --cov-report term {posargs} + unit: pytest -vv --maxfail=10 --capture=no -v --cov=celery --cov-report=xml --junitxml=junit.xml -o junit_family=legacy --cov-report term {posargs} integration: pytest -xsvv t/integration {posargs} smoke: pytest -xsvv t/smoke --dist=loadscope --reruns 5 --reruns-delay 10 {posargs} setenv = From c05269a694385117721916b2bf2c56fad657eab1 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 31 Mar 2025 01:12:52 +0300 Subject: [PATCH 0989/1051] Bump Kombu to v5.5.2 (#9643) --- requirements/default.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/default.txt b/requirements/default.txt index c456feee5cd..fc85b911128 100644 --- a/requirements/default.txt +++ b/requirements/default.txt @@ -1,5 +1,5 @@ billiard>=4.2.1,<5.0 -kombu>=5.5.0,<5.6 +kombu>=5.5.2,<5.6 vine>=5.1.0,<6.0 click>=8.1.2,<9.0 click-didyoumean>=0.3.0 From d1c35bbdf014f13f4ab698d75e3ea381a017b090 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 31 Mar 2025 23:18:17 +0300 Subject: [PATCH 0990/1051] Prepare for release: v5.5.0 (#9644) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Added Changelog for v5.5.0 * Bump version: 5.5.0rc5 → 5.5.0 --- .bumpversion.cfg | 2 +- Changelog.rst | 503 +++++++++++++++++++++++++++++++++ README.rst | 2 +- celery/__init__.py | 2 +- docs/history/changelog-5.5.rst | 503 +++++++++++++++++++++++++++++++++ docs/history/whatsnew-5.5.rst | 173 ++++++++---- docs/includes/introduction.txt | 2 +- 7 files changed, 1126 insertions(+), 61 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 4baba791b06..0f6b53cfb9f 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 5.5.0rc5 +current_version = 5.5.0 commit = True tag = True parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P[a-z\d]+)? diff --git a/Changelog.rst b/Changelog.rst index bbb0ee81802..d1c26827287 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -8,6 +8,509 @@ This document contains change notes for bugfix & new features in the main branch & 5.5.x series, please see :ref:`whatsnew-5.5` for an overview of what's new in Celery 5.5. +.. _version-5.5.0: + +5.5.0 +===== + +:release-date: 2025-03-31 +:release-by: Tomer Nosrati + +Celery v5.5.0 is now available. + +Key Highlights +~~~~~~~~~~~~~~ + +See :ref:`whatsnew-5.5` for a complete overview or read the main highlights below. + +Redis Broker Stability Improvements +----------------------------------- + +Long-standing disconnection issues with the Redis broker have been identified and +resolved in Kombu 5.5.0, which is included with this release. These improvements +significantly enhance stability when using Redis as a broker. + +Additionally, the Redis backend now has better exception handling with the new +``exception_safe_to_retry`` feature, which improves resilience during temporary +Redis connection issues. See :ref:`conf-redis-result-backend` for complete +documentation. + +Contributed by `@drienkop `_ in +`#9614 `_. + +``pycurl`` replaced with ``urllib3`` +------------------------------------ + +Replaced the :pypi:`pycurl` dependency with :pypi:`urllib3`. + +We're monitoring the performance impact of this change and welcome feedback from users +who notice any significant differences in their environments. + +Contributed by `@spawn-guy `_ in Kombu +`#2134 `_ and integrated in Celery via +`#9526 `_. 
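+
+As an illustration of the new ``exception_safe_to_retry`` hook mentioned in the
+Redis section above, here is a condensed sketch of the method added to the Redis
+result backend in `#9614 <https://github.com/celery/celery/pull/9614>`_ (the full
+diff appears earlier in this patch series; the comments are editorial, not part
+of the original change):
+
+.. code-block:: python
+
+    def exception_safe_to_retry(self, exc):
+        # Connection-level errors (e.g. redis.exceptions.ConnectionError or
+        # TimeoutError) are transient, so the backend may retry the operation;
+        # other errors, such as a generic RedisError, are not retried.
+        return isinstance(exc, self.connection_errors)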
+ +RabbitMQ Quorum Queues Support +------------------------------ + +Added support for RabbitMQ's new `Quorum Queues `_ +feature, including compatibility with ETA tasks. This implementation has some limitations compared +to classic queues, so please refer to the documentation for details. + +`Native Delayed Delivery `_ +is automatically enabled when quorum queues are detected to implement the ETA mechanism. + +See :ref:`using-quorum-queues` for complete documentation. + +Configuration options: + +- :setting:`broker_native_delayed_delivery_queue_type`: Specifies the queue type for + delayed delivery (default: ``quorum``) +- :setting:`task_default_queue_type`: Sets the default queue type for tasks + (default: ``classic``) +- :setting:`worker_detect_quorum_queues`: Controls automatic detection of quorum + queues (default: ``True``) + +Contributed in `#9207 `_, +`#9121 `_, and +`#9599 `_. + +For details regarding the 404 errors, see +`New Year's Security Incident `_. + +Soft Shutdown Mechanism +----------------------- + +Soft shutdown is a time limited warm shutdown, initiated just before the cold shutdown. +The worker will allow :setting:`worker_soft_shutdown_timeout` seconds for all currently +executing tasks to finish before it terminates. If the time limit is reached, the worker +will initiate a cold shutdown and cancel all currently executing tasks. + +This feature is particularly valuable when using brokers with visibility timeout +mechanisms, such as Redis or SQS. It allows the worker enough time to re-queue +tasks that were not completed before exiting, preventing task loss during worker +shutdown. + +See :ref:`worker-stopping` for complete documentation on worker shutdown types. + +Configuration options: + +- :setting:`worker_soft_shutdown_timeout`: Sets the duration in seconds for the soft + shutdown period (default: ``0.0``, disabled) +- :setting:`worker_enable_soft_shutdown_on_idle`: Controls whether soft shutdown + should be enabled even when the worker is idle (default: ``False``) + +Contributed by `@Nusnus `_ in +`#9213 `_, +`#9231 `_, and +`#9238 `_. + +Pydantic Support +---------------- + +New native support for Pydantic models in tasks. This integration +allows you to leverage Pydantic's powerful data validation and serialization +capabilities directly in your Celery tasks. + +Example usage: + +.. code-block:: python + + from pydantic import BaseModel + from celery import Celery + + app = Celery('tasks') + + class ArgModel(BaseModel): + value: int + + class ReturnModel(BaseModel): + value: str + + @app.task(pydantic=True) + def x(arg: ArgModel) -> ReturnModel: + # args/kwargs type hinted as Pydantic model will be converted + assert isinstance(arg, ArgModel) + + # The returned model will be converted to a dict automatically + return ReturnModel(value=f"example: {arg.value}") + +See :ref:`task-pydantic` for complete documentation. + +Configuration options: + +- ``pydantic=True``: Enables Pydantic integration for the task +- ``pydantic_strict=True/False``: Controls whether strict validation is enabled + (default: ``False``) +- ``pydantic_context={...}``: Provides additional context for validation +- ``pydantic_dump_kwargs={...}``: Customizes serialization behavior + +Contributed by `@mathiasertl `_ in +`#9023 `_, +`#9319 `_, and +`#9393 `_. + +Google Pub/Sub Transport +------------------------ + +New support for Google Cloud Pub/Sub as a message transport, expanding +Celery's cloud integration options. + +See :ref:`broker-gcpubsub` for complete documentation. 
+ +For the Google Pub/Sub support you have to install additional dependencies: + +.. code-block:: console + + $ pip install "celery[gcpubsub]" + +Then configure your Celery application to use the Google Pub/Sub transport: + +.. code-block:: python + + broker_url = 'gcpubsub://projects/project-id' + +Contributed by `@haimjether `_ in +`#9351 `_. + +Python 3.13 Support +------------------- + +Official support for Python 3.13. All core dependencies have been +updated to ensure compatibility, including Kombu and py-amqp. + +This release maintains compatibility with Python 3.8 through 3.13, as well as +PyPy 3.10+. + +Contributed by `@Nusnus `_ in +`#9309 `_ and +`#9350 `_. + +REMAP_SIGTERM Support +--------------------- + +The "REMAP_SIGTERM" feature, previously undocumented, has been tested, documented, +and is now officially supported. This feature allows you to remap the SIGTERM +signal to SIGQUIT, enabling you to initiate a soft or cold shutdown using TERM +instead of QUIT. + +This is particularly useful in containerized environments where SIGTERM is the +standard signal for graceful termination. + +See :ref:`Cold Shutdown documentation ` for more info. + +To enable this feature, set the environment variable: + +.. code-block:: bash + + export REMAP_SIGTERM="SIGQUIT" + +Contributed by `@Nusnus `_ in +`#9461 `_. + +Database Backend Improvements +----------------------------- + +New ``create_tables_at_setup`` option for the database +backend. This option controls when database tables are created, allowing for +non-lazy table creation. + +By default (``create_tables_at_setup=True``), tables are created during backend +initialization. Setting this to ``False`` defers table creation until they are +actually needed, which can be useful in certain deployment scenarios where you want +more control over database schema management. + +See :ref:`conf-database-result-backend` for complete documentation. + +Configuration: + +.. code-block:: python + + app.conf.result_backend = 'db+sqlite:///results.db' + app.conf.database_create_tables_at_setup = False + +Contributed by `@MarcBresson `_ in +`#9228 `_. 
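+
+A slightly fuller sketch of the deferred mode (the broker URL and task are
+illustrative, not part of the original change): with
+``create_tables_at_setup=False`` the schema is created only when the backend
+actually needs the tables, rather than at backend initialization:
+
+.. code-block:: python
+
+    from celery import Celery
+
+    app = Celery('tasks', broker='amqp://localhost')
+    app.conf.result_backend = 'db+sqlite:///results.db'
+    # Defer CREATE TABLE until the tables are actually needed.
+    app.conf.database_create_tables_at_setup = False
+
+    @app.task
+    def add(x, y):
+        return x + y
+
+    # No tables are created yet; they are created once the backend needs
+    # them (e.g. when the first task result is stored).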
+ +What's Changed +~~~~~~~~~~~~~~ + +- (docs): use correct version celery v.5.4.x (#8975) +- Update mypy to 1.10.0 (#8977) +- Limit pymongo<4.7 when Python <= 3.10 due to breaking changes in 4.7 (#8988) +- Bump pytest from 8.1.1 to 8.2.0 (#8987) +- Update README to Include FastAPI in Framework Integration Section (#8978) +- Clarify return values of ..._on_commit methods (#8984) +- add kafka broker docs (#8935) +- Limit pymongo<4.7 regardless of Python version (#8999) +- Update pymongo[srv] requirement from <4.7,>=4.0.2 to >=4.0.2,<4.8 (#9000) +- Update elasticsearch requirement from <=8.13.0 to <=8.13.1 (#9004) +- security: SecureSerializer: support generic low-level serializers (#8982) +- don't kill if pid same as file (#8997) (#8998) +- Update cryptography to 42.0.6 (#9005) +- Bump cryptography from 42.0.6 to 42.0.7 (#9009) +- don't kill if pid same as file (#8997) (#8998) (#9007) +- Added -vv to unit, integration and smoke tests (#9014) +- SecuritySerializer: ensure pack separator will not be conflicted with serialized fields (#9010) +- Update sphinx-click to 5.2.2 (#9025) +- Bump sphinx-click from 5.2.2 to 6.0.0 (#9029) +- Fix a typo to display the help message in first-steps-with-django (#9036) +- Pinned requests to v2.31.0 due to docker-py bug #3256 (#9039) +- Fix certificate validity check (#9037) +- Revert "Pinned requests to v2.31.0 due to docker-py bug #3256" (#9043) +- Bump pytest from 8.2.0 to 8.2.1 (#9035) +- Update elasticsearch requirement from <=8.13.1 to <=8.13.2 (#9045) +- Fix detection of custom task set as class attribute with Django (#9038) +- Update elastic-transport requirement from <=8.13.0 to <=8.13.1 (#9050) +- Bump pycouchdb from 1.14.2 to 1.16.0 (#9052) +- Update pytest to 8.2.2 (#9060) +- Bump cryptography from 42.0.7 to 42.0.8 (#9061) +- Update elasticsearch requirement from <=8.13.2 to <=8.14.0 (#9069) +- [enhance feature] Crontab schedule: allow using month names (#9068) +- Enhance tox environment: [testenv:clean] (#9072) +- Clarify docs about Reserve one task at a time (#9073) +- GCS docs fixes (#9075) +- Use hub.remove_writer instead of hub.remove for write fds (#4185) (#9055) +- Class method to process crontab string (#9079) +- Fixed smoke tests env bug when using integration tasks that rely on Redis (#9090) +- Bugfix - a task will run multiple times when chaining chains with groups (#9021) +- Bump mypy from 1.10.0 to 1.10.1 (#9096) +- Don't add a separator to global_keyprefix if it already has one (#9080) +- Update pymongo[srv] requirement from <4.8,>=4.0.2 to >=4.0.2,<4.9 (#9111) +- Added missing import in examples for Django (#9099) +- Bump Kombu to v5.4.0rc1 (#9117) +- Removed skipping Redis in t/smoke/tests/test_consumer.py tests (#9118) +- Update pytest-subtests to 0.13.0 (#9120) +- Increased smoke tests CI timeout (#9122) +- Bump Kombu to v5.4.0rc2 (#9127) +- Update zstandard to 0.23.0 (#9129) +- Update pytest-subtests to 0.13.1 (#9130) +- Changed retry to tenacity in smoke tests (#9133) +- Bump mypy from 1.10.1 to 1.11.0 (#9135) +- Update cryptography to 43.0.0 (#9138) +- Update pytest to 8.3.1 (#9137) +- Added support for Quorum Queues (#9121) +- Bump Kombu to v5.4.0rc3 (#9139) +- Cleanup in Changelog.rst (#9141) +- Update Django docs for CELERY_CACHE_BACKEND (#9143) +- Added missing docs to previous releases (#9144) +- Fixed a few documentation build warnings (#9145) +- docs(README): link invalid (#9148) +- Prepare for (pre) release: v5.5.0b1 (#9146) +- Bump pytest from 8.3.1 to 8.3.2 (#9153) +- Remove setuptools deprecated test command from 
setup.py (#9159) +- Pin pre-commit to latest version 3.8.0 from Python 3.9 (#9156) +- Bump mypy from 1.11.0 to 1.11.1 (#9164) +- Change "docker-compose" to "docker compose" in Makefile (#9169) +- update python versions and docker compose (#9171) +- Add support for Pydantic model validation/serialization (fixes #8751) (#9023) +- Allow local dynamodb to be installed on another host than localhost (#8965) +- Terminate job implementation for gevent concurrency backend (#9083) +- Bump Kombu to v5.4.0 (#9177) +- Add check for soft_time_limit and time_limit values (#9173) +- Prepare for (pre) release: v5.5.0b2 (#9178) +- Added SQS (localstack) broker to canvas smoke tests (#9179) +- Pin elastic-transport to <= latest version 8.15.0 (#9182) +- Update elasticsearch requirement from <=8.14.0 to <=8.15.0 (#9186) +- improve formatting (#9188) +- Add basic helm chart for celery (#9181) +- Update kafka.rst (#9194) +- Update pytest-order to 1.3.0 (#9198) +- Update mypy to 1.11.2 (#9206) +- all added to routes (#9204) +- Fix typos discovered by codespell (#9212) +- Use tzdata extras with zoneinfo backports (#8286) +- Use `docker compose` in Contributing's doc build section (#9219) +- Failing test for issue #9119 (#9215) +- Fix date_done timezone issue (#8385) +- CI Fixes to smoke tests (#9223) +- fix: passes current request context when pushing to request_stack (#9208) +- Fix broken link in the Using RabbitMQ docs page (#9226) +- Added Soft Shutdown Mechanism (#9213) +- Added worker_enable_soft_shutdown_on_idle (#9231) +- Bump cryptography from 43.0.0 to 43.0.1 (#9233) +- Added docs regarding the relevancy of soft shutdown and ETA tasks (#9238) +- Show broker_connection_retry_on_startup warning only if it evaluates as False (#9227) +- Fixed docker-docs CI failure (#9240) +- Added docker cleanup auto-fixture to improve smoke tests stability (#9243) +- print is not thread-safe, so should not be used in signal handler (#9222) +- Prepare for (pre) release: v5.5.0b3 (#9244) +- Correct the error description in exception message when validate soft_time_limit (#9246) +- Update msgpack to 1.1.0 (#9249) +- chore(utils/time.py): rename `_is_ambigious` -> `_is_ambiguous` (#9248) +- Reduced Smoke Tests to min/max supported python (3.8/3.12) (#9252) +- Update pytest to 8.3.3 (#9253) +- Update elasticsearch requirement from <=8.15.0 to <=8.15.1 (#9255) +- update mongodb without deprecated `[srv]` extra requirement (#9258) +- blacksmith.sh: Migrate workflows to Blacksmith (#9261) +- Fixes #9119: inject dispatch_uid for retry-wrapped receivers (#9247) +- Run all smoke tests CI jobs together (#9263) +- Improve documentation on visibility timeout (#9264) +- Bump pytest-celery to 1.1.2 (#9267) +- Added missing "app.conf.visibility_timeout" in smoke tests (#9266) +- Improved stability with t/smoke/tests/test_consumer.py (#9268) +- Improved Redis container stability in the smoke tests (#9271) +- Disabled EXHAUST_MEMORY tests in Smoke-tasks (#9272) +- Marked xfail for test_reducing_prefetch_count with Redis - flaky test (#9273) +- Fixed pypy unit tests random failures in the CI (#9275) +- Fixed more pypy unit tests random failures in the CI (#9278) +- Fix Redis container from aborting randomly (#9276) +- Run Integration & Smoke CI tests together after unit tests passes (#9280) +- Added "loglevel verbose" to Redis containers in smoke tests (#9282) +- Fixed Redis error in the smoke tests: "Possible SECURITY ATTACK detected" (#9284) +- Refactored the smoke tests github workflow (#9285) +- Increased --reruns 3->4 in smoke tests 
(#9286) +- Improve stability of smoke tests (CI and Local) (#9287) +- Fixed Smoke tests CI "test-case" lables (specific instead of general) (#9288) +- Use assert_log_exists instead of wait_for_log in worker smoke tests (#9290) +- Optimized t/smoke/tests/test_worker.py (#9291) +- Enable smoke tests dockers check before each test starts (#9292) +- Relaxed smoke tests flaky tests mechanism (#9293) +- Updated quorum queue detection to handle multiple broker instances (#9294) +- Non-lazy table creation for database backend (#9228) +- Pin pymongo to latest version 4.9 (#9297) +- Bump pymongo from 4.9 to 4.9.1 (#9298) +- Bump Kombu to v5.4.2 (#9304) +- Use rabbitmq:3 in stamping smoke tests (#9307) +- Bump pytest-celery to 1.1.3 (#9308) +- Added Python 3.13 Support (#9309) +- Add log when global qos is disabled (#9296) +- Added official release docs (whatsnew) for v5.5 (#9312) +- Enable Codespell autofix (#9313) +- Pydantic typehints: Fix optional, allow generics (#9319) +- Prepare for (pre) release: v5.5.0b4 (#9322) +- Added Blacksmith.sh to the Sponsors section in the README (#9323) +- Revert "Added Blacksmith.sh to the Sponsors section in the README" (#9324) +- Added Blacksmith.sh to the Sponsors section in the README (#9325) +- Added missing " |oc-sponsor-3|” in README (#9326) +- Use Blacksmith SVG logo (#9327) +- Updated Blacksmith SVG logo (#9328) +- Revert "Updated Blacksmith SVG logo" (#9329) +- Update pymongo to 4.10.0 (#9330) +- Update pymongo to 4.10.1 (#9332) +- Update user guide to recommend delay_on_commit (#9333) +- Pin pre-commit to latest version 4.0.0 (Python 3.9+) (#9334) +- Update ephem to 4.1.6 (#9336) +- Updated Blacksmith SVG logo (#9337) +- Prepare for (pre) release: v5.5.0rc1 (#9341) +- Fix: Treat dbm.error as a corrupted schedule file (#9331) +- Pin pre-commit to latest version 4.0.1 (#9343) +- Added Python 3.13 to Dockerfiles (#9350) +- Skip test_pool_restart_import_modules on PyPy due to test issue (#9352) +- Update elastic-transport requirement from <=8.15.0 to <=8.15.1 (#9347) +- added dragonfly logo (#9353) +- Update README.rst (#9354) +- Update README.rst (#9355) +- Update mypy to 1.12.0 (#9356) +- Bump Kombu to v5.5.0rc1 (#9357) +- Fix `celery --loader` option parsing (#9361) +- Add support for Google Pub/Sub transport (#9351) +- Add native incr support for GCSBackend (#9302) +- fix(perform_pending_operations): prevent task duplication on shutdown… (#9348) +- Update grpcio to 1.67.0 (#9365) +- Update google-cloud-firestore to 2.19.0 (#9364) +- Annotate celery/utils/timer2.py (#9362) +- Update cryptography to 43.0.3 (#9366) +- Update mypy to 1.12.1 (#9368) +- Bump mypy from 1.12.1 to 1.13.0 (#9373) +- Pass timeout and confirm_timeout to producer.publish() (#9374) +- Bump Kombu to v5.5.0rc2 (#9382) +- Bump pytest-cov from 5.0.0 to 6.0.0 (#9388) +- default strict to False for pydantic tasks (#9393) +- Only log that global QoS is disabled if using amqp (#9395) +- chore: update sponsorship logo (#9398) +- Allow custom hostname for celery_worker in celery.contrib.pytest / celery.contrib.testing.worker (#9405) +- Removed docker-docs from CI (optional job, malfunctioning) (#9406) +- Added a utility to format changelogs from the auto-generated GitHub release notes (#9408) +- Bump codecov/codecov-action from 4 to 5 (#9412) +- Update elasticsearch requirement from <=8.15.1 to <=8.16.0 (#9410) +- Native Delayed Delivery in RabbitMQ (#9207) +- Prepare for (pre) release: v5.5.0rc2 (#9416) +- Document usage of broker_native_delayed_delivery_queue_type (#9419) +- Adjust section 
in what's new document regarding quorum queues support (#9420) +- Update pytest-rerunfailures to 15.0 (#9422) +- Document group unrolling (#9421) +- fix small typo acces -> access (#9434) +- Update cryptography to 44.0.0 (#9437) +- Added pypy to Dockerfile (#9438) +- Skipped flaky tests on pypy (all pass after ~10 reruns) (#9439) +- Allowing managed credentials for azureblockblob (#9430) +- Allow passing Celery objects to the Click entry point (#9426) +- support Request termination for gevent (#9440) +- Prevent event_mask from being overwritten. (#9432) +- Update pytest to 8.3.4 (#9444) +- Prepare for (pre) release: v5.5.0rc3 (#9450) +- Bugfix: SIGQUIT not initiating cold shutdown when `task_acks_late=False` (#9461) +- Fixed pycurl dep with Python 3.8 (#9471) +- Update elasticsearch requirement from <=8.16.0 to <=8.17.0 (#9469) +- Bump pytest-subtests from 0.13.1 to 0.14.1 (#9459) +- documentation: Added a type annotation to the periodic task example (#9473) +- Prepare for (pre) release: v5.5.0rc4 (#9474) +- Bump mypy from 1.13.0 to 1.14.0 (#9476) +- Fix cassandra backend port settings not working (#9465) +- Unroll group when a group with a single item is chained using the | operator (#9456) +- fix(django): catch the right error when trying to close db connection (#9392) +- Replacing a task with a chain which contains a group now returns a result instead of hanging (#9484) +- Avoid using a group of one as it is now unrolled into a chain (#9510) +- Link to the correct IRC network (#9509) +- Bump pytest-github-actions-annotate-failures from 0.2.0 to 0.3.0 (#9504) +- Update canvas.rst to fix output result from chain object (#9502) +- Unauthorized Changes Cleanup (#9528) +- [RE-APPROVED] fix(django): catch the right error when trying to close db connection (#9529) +- [RE-APPROVED] Link to the correct IRC network (#9531) +- [RE-APPROVED] Update canvas.rst to fix output result from chain object (#9532) +- Update test-ci-base.txt (#9539) +- Update install-pyenv.sh (#9540) +- Update elasticsearch requirement from <=8.17.0 to <=8.17.1 (#9518) +- Bump google-cloud-firestore from 2.19.0 to 2.20.0 (#9493) +- Bump mypy from 1.14.0 to 1.14.1 (#9483) +- Update elastic-transport requirement from <=8.15.1 to <=8.17.0 (#9490) +- Update Dockerfile by adding missing Python version 3.13 (#9549) +- Fix typo for default of sig (#9495) +- fix(crontab): resolve constructor type conflicts (#9551) +- worker_max_memory_per_child: kilobyte is 1024 bytes (#9553) +- Fix formatting in quorum queue docs (#9555) +- Bump cryptography from 44.0.0 to 44.0.1 (#9556) +- Fix the send_task method when detecting if the native delayed delivery approach is available (#9552) +- Reverted PR #7814 & minor code improvement (#9494) +- Improved donation and sponsorship visibility (#9558) +- Updated the Getting Help section, replacing deprecated with new resources (#9559) +- Fixed django example (#9562) +- Bump Kombu to v5.5.0rc3 (#9564) +- Bump ephem from 4.1.6 to 4.2 (#9565) +- Bump pytest-celery to v1.2.0 (#9568) +- Remove dependency on `pycurl` (#9526) +- Set TestWorkController.__test__ (#9574) +- Fixed bug when revoking by stamped headers a stamp that does not exist (#9575) +- Canvas Stamping Doc Fixes (#9578) +- Bugfix: Chord with a chord in header doesn't invoke error callback on inner chord header failure (default config) (#9580) +- Prepare for (pre) release: v5.5.0rc5 (#9582) +- Bump google-cloud-firestore from 2.20.0 to 2.20.1 (#9584) +- Fix tests with Click 8.2 (#9590) +- Bump cryptography from 44.0.1 to 44.0.2 (#9591) +- Update 
elasticsearch requirement from <=8.17.1 to <=8.17.2 (#9594) +- Bump pytest from 8.3.4 to 8.3.5 (#9598) +- Refactored and Enhanced DelayedDelivery bootstep (#9599) +- Improve docs about acks_on_failure_or_timeout (#9577) +- Update SECURITY.md (#9609) +- remove flake8plus as not needed anymore (#9610) +- remove [bdist_wheel] universal = 0 from setup.cfg as not needed (#9611) +- remove importlib-metadata as not needed in python3.8 anymore (#9612) +- feat: define exception_safe_to_retry for redisbackend (#9614) +- Bump Kombu to v5.5.0 (#9615) +- Update elastic-transport requirement from <=8.17.0 to <=8.17.1 (#9616) +- [docs] fix first-steps (#9618) +- Revert "Improve docs about acks_on_failure_or_timeout" (#9606) +- Improve CI stability and performance (#9624) +- Improved explanation for Database transactions at user guide for tasks (#9617) +- update tests to use python 3.8 codes only (#9627) +- #9597: Ensure surpassing Hard Timeout limit when task_acks_on_failure_or_timeout is False rejects the task (#9626) +- Lock Kombu to v5.5.x (using urllib3 instead of pycurl) (#9632) +- Lock pytest-celery to v1.2.x (using urllib3 instead of pycurl) (#9633) +- Add Codecov Test Analytics (#9635) +- Bump Kombu to v5.5.2 (#9643) +- Prepare for release: v5.5.0 (#9644) + .. _version-5.5.0rc5: 5.5.0rc5 diff --git a/README.rst b/README.rst index 716d12c9f24..f55d7393251 100644 --- a/README.rst +++ b/README.rst @@ -2,7 +2,7 @@ |build-status| |coverage| |license| |wheel| |semgrep| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge| -:Version: 5.5.0rc5 (immunity) +:Version: 5.5.0 (immunity) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ diff --git a/celery/__init__.py b/celery/__init__.py index dfecfd72c19..2b2459633c0 100644 --- a/celery/__init__.py +++ b/celery/__init__.py @@ -17,7 +17,7 @@ SERIES = 'immunity' -__version__ = '5.5.0rc5' +__version__ = '5.5.0' __author__ = 'Ask Solem' __contact__ = 'auvipy@gmail.com' __homepage__ = 'https://docs.celeryq.dev/' diff --git a/docs/history/changelog-5.5.rst b/docs/history/changelog-5.5.rst index a8042cd7a06..4d8c1a8c147 100644 --- a/docs/history/changelog-5.5.rst +++ b/docs/history/changelog-5.5.rst @@ -8,6 +8,509 @@ This document contains change notes for bugfix & new features in the main branch & 5.5.x series, please see :ref:`whatsnew-5.5` for an overview of what's new in Celery 5.5. +.. _version-5.5.0: + +5.5.0 +===== + +:release-date: 2025-03-31 +:release-by: Tomer Nosrati + +Celery v5.5.0 is now available. + +Key Highlights +~~~~~~~~~~~~~~ + +See :ref:`whatsnew-5.5` for a complete overview or read the main highlights below. + +Redis Broker Stability Improvements +----------------------------------- + +Long-standing disconnection issues with the Redis broker have been identified and +resolved in Kombu 5.5.0, which is included with this release. These improvements +significantly enhance stability when using Redis as a broker. + +Additionally, the Redis backend now has better exception handling with the new +``exception_safe_to_retry`` feature, which improves resilience during temporary +Redis connection issues. See :ref:`conf-redis-result-backend` for complete +documentation. + +Contributed by `@drienkop `_ in +`#9614 `_. + +``pycurl`` replaced with ``urllib3`` +------------------------------------ + +Replaced the :pypi:`pycurl` dependency with :pypi:`urllib3`. 
+ +We're monitoring the performance impact of this change and welcome feedback from users +who notice any significant differences in their environments. + +Contributed by `@spawn-guy `_ in Kombu +`#2134 `_ and integrated in Celery via +`#9526 `_. + +RabbitMQ Quorum Queues Support +------------------------------ + +Added support for RabbitMQ's new `Quorum Queues `_ +feature, including compatibility with ETA tasks. This implementation has some limitations compared +to classic queues, so please refer to the documentation for details. + +`Native Delayed Delivery `_ +is automatically enabled when quorum queues are detected to implement the ETA mechanism. + +See :ref:`using-quorum-queues` for complete documentation. + +Configuration options: + +- :setting:`broker_native_delayed_delivery_queue_type`: Specifies the queue type for + delayed delivery (default: ``quorum``) +- :setting:`task_default_queue_type`: Sets the default queue type for tasks + (default: ``classic``) +- :setting:`worker_detect_quorum_queues`: Controls automatic detection of quorum + queues (default: ``True``) + +Contributed in `#9207 `_, +`#9121 `_, and +`#9599 `_. + +For details regarding the 404 errors, see +`New Year's Security Incident `_. + +Soft Shutdown Mechanism +----------------------- + +Soft shutdown is a time limited warm shutdown, initiated just before the cold shutdown. +The worker will allow :setting:`worker_soft_shutdown_timeout` seconds for all currently +executing tasks to finish before it terminates. If the time limit is reached, the worker +will initiate a cold shutdown and cancel all currently executing tasks. + +This feature is particularly valuable when using brokers with visibility timeout +mechanisms, such as Redis or SQS. It allows the worker enough time to re-queue +tasks that were not completed before exiting, preventing task loss during worker +shutdown. + +See :ref:`worker-stopping` for complete documentation on worker shutdown types. + +Configuration options: + +- :setting:`worker_soft_shutdown_timeout`: Sets the duration in seconds for the soft + shutdown period (default: ``0.0``, disabled) +- :setting:`worker_enable_soft_shutdown_on_idle`: Controls whether soft shutdown + should be enabled even when the worker is idle (default: ``False``) + +Contributed by `@Nusnus `_ in +`#9213 `_, +`#9231 `_, and +`#9238 `_. + +Pydantic Support +---------------- + +New native support for Pydantic models in tasks. This integration +allows you to leverage Pydantic's powerful data validation and serialization +capabilities directly in your Celery tasks. + +Example usage: + +.. code-block:: python + + from pydantic import BaseModel + from celery import Celery + + app = Celery('tasks') + + class ArgModel(BaseModel): + value: int + + class ReturnModel(BaseModel): + value: str + + @app.task(pydantic=True) + def x(arg: ArgModel) -> ReturnModel: + # args/kwargs type hinted as Pydantic model will be converted + assert isinstance(arg, ArgModel) + + # The returned model will be converted to a dict automatically + return ReturnModel(value=f"example: {arg.value}") + +See :ref:`task-pydantic` for complete documentation. 
+ +Configuration options: + +- ``pydantic=True``: Enables Pydantic integration for the task +- ``pydantic_strict=True/False``: Controls whether strict validation is enabled + (default: ``False``) +- ``pydantic_context={...}``: Provides additional context for validation +- ``pydantic_dump_kwargs={...}``: Customizes serialization behavior + +Contributed by `@mathiasertl `_ in +`#9023 `_, +`#9319 `_, and +`#9393 `_. + +Google Pub/Sub Transport +------------------------ + +New support for Google Cloud Pub/Sub as a message transport, expanding +Celery's cloud integration options. + +See :ref:`broker-gcpubsub` for complete documentation. + +For the Google Pub/Sub support you have to install additional dependencies: + +.. code-block:: console + + $ pip install "celery[gcpubsub]" + +Then configure your Celery application to use the Google Pub/Sub transport: + +.. code-block:: python + + broker_url = 'gcpubsub://projects/project-id' + +Contributed by `@haimjether `_ in +`#9351 `_. + +Python 3.13 Support +------------------- + +Official support for Python 3.13. All core dependencies have been +updated to ensure compatibility, including Kombu and py-amqp. + +This release maintains compatibility with Python 3.8 through 3.13, as well as +PyPy 3.10+. + +Contributed by `@Nusnus `_ in +`#9309 `_ and +`#9350 `_. + +REMAP_SIGTERM Support +--------------------- + +The "REMAP_SIGTERM" feature, previously undocumented, has been tested, documented, +and is now officially supported. This feature allows you to remap the SIGTERM +signal to SIGQUIT, enabling you to initiate a soft or cold shutdown using TERM +instead of QUIT. + +This is particularly useful in containerized environments where SIGTERM is the +standard signal for graceful termination. + +See :ref:`Cold Shutdown documentation ` for more info. + +To enable this feature, set the environment variable: + +.. code-block:: bash + + export REMAP_SIGTERM="SIGQUIT" + +Contributed by `@Nusnus `_ in +`#9461 `_. + +Database Backend Improvements +----------------------------- + +New ``create_tables_at_setup`` option for the database +backend. This option controls when database tables are created, allowing for +non-lazy table creation. + +By default (``create_tables_at_setup=True``), tables are created during backend +initialization. Setting this to ``False`` defers table creation until they are +actually needed, which can be useful in certain deployment scenarios where you want +more control over database schema management. + +See :ref:`conf-database-result-backend` for complete documentation. + +Configuration: + +.. code-block:: python + + app.conf.result_backend = 'db+sqlite:///results.db' + app.conf.database_create_tables_at_setup = False + +Contributed by `@MarcBresson `_ in +`#9228 `_. 
+ +What's Changed +~~~~~~~~~~~~~~ + +- (docs): use correct version celery v.5.4.x (#8975) +- Update mypy to 1.10.0 (#8977) +- Limit pymongo<4.7 when Python <= 3.10 due to breaking changes in 4.7 (#8988) +- Bump pytest from 8.1.1 to 8.2.0 (#8987) +- Update README to Include FastAPI in Framework Integration Section (#8978) +- Clarify return values of ..._on_commit methods (#8984) +- add kafka broker docs (#8935) +- Limit pymongo<4.7 regardless of Python version (#8999) +- Update pymongo[srv] requirement from <4.7,>=4.0.2 to >=4.0.2,<4.8 (#9000) +- Update elasticsearch requirement from <=8.13.0 to <=8.13.1 (#9004) +- security: SecureSerializer: support generic low-level serializers (#8982) +- don't kill if pid same as file (#8997) (#8998) +- Update cryptography to 42.0.6 (#9005) +- Bump cryptography from 42.0.6 to 42.0.7 (#9009) +- don't kill if pid same as file (#8997) (#8998) (#9007) +- Added -vv to unit, integration and smoke tests (#9014) +- SecuritySerializer: ensure pack separator will not be conflicted with serialized fields (#9010) +- Update sphinx-click to 5.2.2 (#9025) +- Bump sphinx-click from 5.2.2 to 6.0.0 (#9029) +- Fix a typo to display the help message in first-steps-with-django (#9036) +- Pinned requests to v2.31.0 due to docker-py bug #3256 (#9039) +- Fix certificate validity check (#9037) +- Revert "Pinned requests to v2.31.0 due to docker-py bug #3256" (#9043) +- Bump pytest from 8.2.0 to 8.2.1 (#9035) +- Update elasticsearch requirement from <=8.13.1 to <=8.13.2 (#9045) +- Fix detection of custom task set as class attribute with Django (#9038) +- Update elastic-transport requirement from <=8.13.0 to <=8.13.1 (#9050) +- Bump pycouchdb from 1.14.2 to 1.16.0 (#9052) +- Update pytest to 8.2.2 (#9060) +- Bump cryptography from 42.0.7 to 42.0.8 (#9061) +- Update elasticsearch requirement from <=8.13.2 to <=8.14.0 (#9069) +- [enhance feature] Crontab schedule: allow using month names (#9068) +- Enhance tox environment: [testenv:clean] (#9072) +- Clarify docs about Reserve one task at a time (#9073) +- GCS docs fixes (#9075) +- Use hub.remove_writer instead of hub.remove for write fds (#4185) (#9055) +- Class method to process crontab string (#9079) +- Fixed smoke tests env bug when using integration tasks that rely on Redis (#9090) +- Bugfix - a task will run multiple times when chaining chains with groups (#9021) +- Bump mypy from 1.10.0 to 1.10.1 (#9096) +- Don't add a separator to global_keyprefix if it already has one (#9080) +- Update pymongo[srv] requirement from <4.8,>=4.0.2 to >=4.0.2,<4.9 (#9111) +- Added missing import in examples for Django (#9099) +- Bump Kombu to v5.4.0rc1 (#9117) +- Removed skipping Redis in t/smoke/tests/test_consumer.py tests (#9118) +- Update pytest-subtests to 0.13.0 (#9120) +- Increased smoke tests CI timeout (#9122) +- Bump Kombu to v5.4.0rc2 (#9127) +- Update zstandard to 0.23.0 (#9129) +- Update pytest-subtests to 0.13.1 (#9130) +- Changed retry to tenacity in smoke tests (#9133) +- Bump mypy from 1.10.1 to 1.11.0 (#9135) +- Update cryptography to 43.0.0 (#9138) +- Update pytest to 8.3.1 (#9137) +- Added support for Quorum Queues (#9121) +- Bump Kombu to v5.4.0rc3 (#9139) +- Cleanup in Changelog.rst (#9141) +- Update Django docs for CELERY_CACHE_BACKEND (#9143) +- Added missing docs to previous releases (#9144) +- Fixed a few documentation build warnings (#9145) +- docs(README): link invalid (#9148) +- Prepare for (pre) release: v5.5.0b1 (#9146) +- Bump pytest from 8.3.1 to 8.3.2 (#9153) +- Remove setuptools deprecated test command from 
setup.py (#9159) +- Pin pre-commit to latest version 3.8.0 from Python 3.9 (#9156) +- Bump mypy from 1.11.0 to 1.11.1 (#9164) +- Change "docker-compose" to "docker compose" in Makefile (#9169) +- update python versions and docker compose (#9171) +- Add support for Pydantic model validation/serialization (fixes #8751) (#9023) +- Allow local dynamodb to be installed on another host than localhost (#8965) +- Terminate job implementation for gevent concurrency backend (#9083) +- Bump Kombu to v5.4.0 (#9177) +- Add check for soft_time_limit and time_limit values (#9173) +- Prepare for (pre) release: v5.5.0b2 (#9178) +- Added SQS (localstack) broker to canvas smoke tests (#9179) +- Pin elastic-transport to <= latest version 8.15.0 (#9182) +- Update elasticsearch requirement from <=8.14.0 to <=8.15.0 (#9186) +- improve formatting (#9188) +- Add basic helm chart for celery (#9181) +- Update kafka.rst (#9194) +- Update pytest-order to 1.3.0 (#9198) +- Update mypy to 1.11.2 (#9206) +- all added to routes (#9204) +- Fix typos discovered by codespell (#9212) +- Use tzdata extras with zoneinfo backports (#8286) +- Use `docker compose` in Contributing's doc build section (#9219) +- Failing test for issue #9119 (#9215) +- Fix date_done timezone issue (#8385) +- CI Fixes to smoke tests (#9223) +- fix: passes current request context when pushing to request_stack (#9208) +- Fix broken link in the Using RabbitMQ docs page (#9226) +- Added Soft Shutdown Mechanism (#9213) +- Added worker_enable_soft_shutdown_on_idle (#9231) +- Bump cryptography from 43.0.0 to 43.0.1 (#9233) +- Added docs regarding the relevancy of soft shutdown and ETA tasks (#9238) +- Show broker_connection_retry_on_startup warning only if it evaluates as False (#9227) +- Fixed docker-docs CI failure (#9240) +- Added docker cleanup auto-fixture to improve smoke tests stability (#9243) +- print is not thread-safe, so should not be used in signal handler (#9222) +- Prepare for (pre) release: v5.5.0b3 (#9244) +- Correct the error description in exception message when validate soft_time_limit (#9246) +- Update msgpack to 1.1.0 (#9249) +- chore(utils/time.py): rename `_is_ambigious` -> `_is_ambiguous` (#9248) +- Reduced Smoke Tests to min/max supported python (3.8/3.12) (#9252) +- Update pytest to 8.3.3 (#9253) +- Update elasticsearch requirement from <=8.15.0 to <=8.15.1 (#9255) +- update mongodb without deprecated `[srv]` extra requirement (#9258) +- blacksmith.sh: Migrate workflows to Blacksmith (#9261) +- Fixes #9119: inject dispatch_uid for retry-wrapped receivers (#9247) +- Run all smoke tests CI jobs together (#9263) +- Improve documentation on visibility timeout (#9264) +- Bump pytest-celery to 1.1.2 (#9267) +- Added missing "app.conf.visibility_timeout" in smoke tests (#9266) +- Improved stability with t/smoke/tests/test_consumer.py (#9268) +- Improved Redis container stability in the smoke tests (#9271) +- Disabled EXHAUST_MEMORY tests in Smoke-tasks (#9272) +- Marked xfail for test_reducing_prefetch_count with Redis - flaky test (#9273) +- Fixed pypy unit tests random failures in the CI (#9275) +- Fixed more pypy unit tests random failures in the CI (#9278) +- Fix Redis container from aborting randomly (#9276) +- Run Integration & Smoke CI tests together after unit tests passes (#9280) +- Added "loglevel verbose" to Redis containers in smoke tests (#9282) +- Fixed Redis error in the smoke tests: "Possible SECURITY ATTACK detected" (#9284) +- Refactored the smoke tests github workflow (#9285) +- Increased --reruns 3->4 in smoke tests 
(#9286) +- Improve stability of smoke tests (CI and Local) (#9287) +- Fixed Smoke tests CI "test-case" lables (specific instead of general) (#9288) +- Use assert_log_exists instead of wait_for_log in worker smoke tests (#9290) +- Optimized t/smoke/tests/test_worker.py (#9291) +- Enable smoke tests dockers check before each test starts (#9292) +- Relaxed smoke tests flaky tests mechanism (#9293) +- Updated quorum queue detection to handle multiple broker instances (#9294) +- Non-lazy table creation for database backend (#9228) +- Pin pymongo to latest version 4.9 (#9297) +- Bump pymongo from 4.9 to 4.9.1 (#9298) +- Bump Kombu to v5.4.2 (#9304) +- Use rabbitmq:3 in stamping smoke tests (#9307) +- Bump pytest-celery to 1.1.3 (#9308) +- Added Python 3.13 Support (#9309) +- Add log when global qos is disabled (#9296) +- Added official release docs (whatsnew) for v5.5 (#9312) +- Enable Codespell autofix (#9313) +- Pydantic typehints: Fix optional, allow generics (#9319) +- Prepare for (pre) release: v5.5.0b4 (#9322) +- Added Blacksmith.sh to the Sponsors section in the README (#9323) +- Revert "Added Blacksmith.sh to the Sponsors section in the README" (#9324) +- Added Blacksmith.sh to the Sponsors section in the README (#9325) +- Added missing " |oc-sponsor-3|” in README (#9326) +- Use Blacksmith SVG logo (#9327) +- Updated Blacksmith SVG logo (#9328) +- Revert "Updated Blacksmith SVG logo" (#9329) +- Update pymongo to 4.10.0 (#9330) +- Update pymongo to 4.10.1 (#9332) +- Update user guide to recommend delay_on_commit (#9333) +- Pin pre-commit to latest version 4.0.0 (Python 3.9+) (#9334) +- Update ephem to 4.1.6 (#9336) +- Updated Blacksmith SVG logo (#9337) +- Prepare for (pre) release: v5.5.0rc1 (#9341) +- Fix: Treat dbm.error as a corrupted schedule file (#9331) +- Pin pre-commit to latest version 4.0.1 (#9343) +- Added Python 3.13 to Dockerfiles (#9350) +- Skip test_pool_restart_import_modules on PyPy due to test issue (#9352) +- Update elastic-transport requirement from <=8.15.0 to <=8.15.1 (#9347) +- added dragonfly logo (#9353) +- Update README.rst (#9354) +- Update README.rst (#9355) +- Update mypy to 1.12.0 (#9356) +- Bump Kombu to v5.5.0rc1 (#9357) +- Fix `celery --loader` option parsing (#9361) +- Add support for Google Pub/Sub transport (#9351) +- Add native incr support for GCSBackend (#9302) +- fix(perform_pending_operations): prevent task duplication on shutdown… (#9348) +- Update grpcio to 1.67.0 (#9365) +- Update google-cloud-firestore to 2.19.0 (#9364) +- Annotate celery/utils/timer2.py (#9362) +- Update cryptography to 43.0.3 (#9366) +- Update mypy to 1.12.1 (#9368) +- Bump mypy from 1.12.1 to 1.13.0 (#9373) +- Pass timeout and confirm_timeout to producer.publish() (#9374) +- Bump Kombu to v5.5.0rc2 (#9382) +- Bump pytest-cov from 5.0.0 to 6.0.0 (#9388) +- default strict to False for pydantic tasks (#9393) +- Only log that global QoS is disabled if using amqp (#9395) +- chore: update sponsorship logo (#9398) +- Allow custom hostname for celery_worker in celery.contrib.pytest / celery.contrib.testing.worker (#9405) +- Removed docker-docs from CI (optional job, malfunctioning) (#9406) +- Added a utility to format changelogs from the auto-generated GitHub release notes (#9408) +- Bump codecov/codecov-action from 4 to 5 (#9412) +- Update elasticsearch requirement from <=8.15.1 to <=8.16.0 (#9410) +- Native Delayed Delivery in RabbitMQ (#9207) +- Prepare for (pre) release: v5.5.0rc2 (#9416) +- Document usage of broker_native_delayed_delivery_queue_type (#9419) +- Adjust section 
in what's new document regarding quorum queues support (#9420) +- Update pytest-rerunfailures to 15.0 (#9422) +- Document group unrolling (#9421) +- fix small typo acces -> access (#9434) +- Update cryptography to 44.0.0 (#9437) +- Added pypy to Dockerfile (#9438) +- Skipped flaky tests on pypy (all pass after ~10 reruns) (#9439) +- Allowing managed credentials for azureblockblob (#9430) +- Allow passing Celery objects to the Click entry point (#9426) +- support Request termination for gevent (#9440) +- Prevent event_mask from being overwritten. (#9432) +- Update pytest to 8.3.4 (#9444) +- Prepare for (pre) release: v5.5.0rc3 (#9450) +- Bugfix: SIGQUIT not initiating cold shutdown when `task_acks_late=False` (#9461) +- Fixed pycurl dep with Python 3.8 (#9471) +- Update elasticsearch requirement from <=8.16.0 to <=8.17.0 (#9469) +- Bump pytest-subtests from 0.13.1 to 0.14.1 (#9459) +- documentation: Added a type annotation to the periodic task example (#9473) +- Prepare for (pre) release: v5.5.0rc4 (#9474) +- Bump mypy from 1.13.0 to 1.14.0 (#9476) +- Fix cassandra backend port settings not working (#9465) +- Unroll group when a group with a single item is chained using the | operator (#9456) +- fix(django): catch the right error when trying to close db connection (#9392) +- Replacing a task with a chain which contains a group now returns a result instead of hanging (#9484) +- Avoid using a group of one as it is now unrolled into a chain (#9510) +- Link to the correct IRC network (#9509) +- Bump pytest-github-actions-annotate-failures from 0.2.0 to 0.3.0 (#9504) +- Update canvas.rst to fix output result from chain object (#9502) +- Unauthorized Changes Cleanup (#9528) +- [RE-APPROVED] fix(django): catch the right error when trying to close db connection (#9529) +- [RE-APPROVED] Link to the correct IRC network (#9531) +- [RE-APPROVED] Update canvas.rst to fix output result from chain object (#9532) +- Update test-ci-base.txt (#9539) +- Update install-pyenv.sh (#9540) +- Update elasticsearch requirement from <=8.17.0 to <=8.17.1 (#9518) +- Bump google-cloud-firestore from 2.19.0 to 2.20.0 (#9493) +- Bump mypy from 1.14.0 to 1.14.1 (#9483) +- Update elastic-transport requirement from <=8.15.1 to <=8.17.0 (#9490) +- Update Dockerfile by adding missing Python version 3.13 (#9549) +- Fix typo for default of sig (#9495) +- fix(crontab): resolve constructor type conflicts (#9551) +- worker_max_memory_per_child: kilobyte is 1024 bytes (#9553) +- Fix formatting in quorum queue docs (#9555) +- Bump cryptography from 44.0.0 to 44.0.1 (#9556) +- Fix the send_task method when detecting if the native delayed delivery approach is available (#9552) +- Reverted PR #7814 & minor code improvement (#9494) +- Improved donation and sponsorship visibility (#9558) +- Updated the Getting Help section, replacing deprecated with new resources (#9559) +- Fixed django example (#9562) +- Bump Kombu to v5.5.0rc3 (#9564) +- Bump ephem from 4.1.6 to 4.2 (#9565) +- Bump pytest-celery to v1.2.0 (#9568) +- Remove dependency on `pycurl` (#9526) +- Set TestWorkController.__test__ (#9574) +- Fixed bug when revoking by stamped headers a stamp that does not exist (#9575) +- Canvas Stamping Doc Fixes (#9578) +- Bugfix: Chord with a chord in header doesn't invoke error callback on inner chord header failure (default config) (#9580) +- Prepare for (pre) release: v5.5.0rc5 (#9582) +- Bump google-cloud-firestore from 2.20.0 to 2.20.1 (#9584) +- Fix tests with Click 8.2 (#9590) +- Bump cryptography from 44.0.1 to 44.0.2 (#9591) +- Update 
elasticsearch requirement from <=8.17.1 to <=8.17.2 (#9594) +- Bump pytest from 8.3.4 to 8.3.5 (#9598) +- Refactored and Enhanced DelayedDelivery bootstep (#9599) +- Improve docs about acks_on_failure_or_timeout (#9577) +- Update SECURITY.md (#9609) +- remove flake8plus as not needed anymore (#9610) +- remove [bdist_wheel] universal = 0 from setup.cfg as not needed (#9611) +- remove importlib-metadata as not needed in python3.8 anymore (#9612) +- feat: define exception_safe_to_retry for redisbackend (#9614) +- Bump Kombu to v5.5.0 (#9615) +- Update elastic-transport requirement from <=8.17.0 to <=8.17.1 (#9616) +- [docs] fix first-steps (#9618) +- Revert "Improve docs about acks_on_failure_or_timeout" (#9606) +- Improve CI stability and performance (#9624) +- Improved explanation for Database transactions at user guide for tasks (#9617) +- update tests to use python 3.8 codes only (#9627) +- #9597: Ensure surpassing Hard Timeout limit when task_acks_on_failure_or_timeout is False rejects the task (#9626) +- Lock Kombu to v5.5.x (using urllib3 instead of pycurl) (#9632) +- Lock pytest-celery to v1.2.x (using urllib3 instead of pycurl) (#9633) +- Add Codecov Test Analytics (#9635) +- Bump Kombu to v5.5.2 (#9643) +- Prepare for release: v5.5.0 (#9644) + .. _version-5.5.0rc5: 5.5.0rc5 diff --git a/docs/history/whatsnew-5.5.rst b/docs/history/whatsnew-5.5.rst index d2f5f9a7958..120e3a3b5f3 100644 --- a/docs/history/whatsnew-5.5.rst +++ b/docs/history/whatsnew-5.5.rst @@ -160,8 +160,6 @@ Python 3.8 Support Python 3.8 will reach EOL in October, 2024. -Celery v5.5 will be the last version to support Python 3.8. - Minimum Dependencies -------------------- @@ -200,44 +198,81 @@ News Redis Broker Stability Improvements ----------------------------------- -The root cause of the Redis broker instability issue has been `identified and resolved `_ -in the v5.4.0 release of Kombu, which should resolve the disconnections bug and offer -additional improvements. +Long-standing disconnection issues with the Redis broker have been identified and +resolved in Kombu 5.5.0. These improvements significantly enhance stability when +using Redis as a broker, particularly in high-throughput environments. -Soft Shutdown -------------- +Additionally, the Redis backend now has better exception handling with the new +``exception_safe_to_retry`` feature, which improves resilience during temporary +Redis connection issues. See :ref:`conf-redis-result-backend` for complete +documentation. -The soft shutdown is a new mechanism in Celery that sits between the warm shutdown and the cold shutdown. -It sets a time limited "warm shutdown" period, during which the worker will continue to process tasks that -are already running. After the soft shutdown ends, the worker will initiate a graceful cold shutdown, -stopping all tasks and exiting. +``pycurl`` replaced with ``urllib3`` +------------------------------------ -The soft shutdown is disabled by default, and can be enabled by setting the new configuration option -:setting:`worker_soft_shutdown_timeout`. If a worker is not running any task when the soft shutdown initiates, -it will skip the warm shutdown period and proceed directly to the cold shutdown unless the new configuration option -:setting:`worker_enable_soft_shutdown_on_idle` is set to ``True``. This is useful for workers that are idle, -waiting on ETA tasks to be executed that still want to enable the soft shutdown anyways. +Replaced the :pypi:`pycurl` dependency with :pypi:`urllib3`. 
-The soft shutdown can replace the cold shutdown when using a broker with a visibility timeout mechanism, -like :ref:`Redis ` or :ref:`SQS `, to enable a more graceful cold shutdown procedure, -allowing the worker enough time to re-queue tasks that were not completed (e.g., ``Restoring 1 unacknowledged message(s)``) -by resetting the visibility timeout of the unacknowledged messages just before the worker exits completely. +We're monitoring the performance impact of this change and welcome feedback from users +who notice any significant differences in their environments. -Pydantic Support ----------------- +RabbitMQ Quorum Queues Support +------------------------------ -This release introduces support for Pydantic models in Celery tasks by @mathiasertl: +Added support for RabbitMQ's new `Quorum Queues `_ +feature, including compatibility with ETA tasks. This implementation has some limitations compared +to classic queues, so please refer to the documentation for details. -.. code-block:: bash +`Native Delayed Delivery `_ +is automatically enabled when quorum queues are detected to implement the ETA mechanism. + +See :ref:`using-quorum-queues` for complete documentation. + +Configuration options: + +- :setting:`broker_native_delayed_delivery_queue_type`: Specifies the queue type for + delayed delivery (default: ``quorum``) +- :setting:`task_default_queue_type`: Sets the default queue type for tasks + (default: ``classic``) +- :setting:`worker_detect_quorum_queues`: Controls automatic detection of quorum + queues (default: ``True``) + +Soft Shutdown Mechanism +----------------------- + +Soft shutdown is a time limited warm shutdown, initiated just before the cold shutdown. +The worker will allow :setting:`worker_soft_shutdown_timeout` seconds for all currently +executing tasks to finish before it terminates. If the time limit is reached, the worker +will initiate a cold shutdown and cancel all currently executing tasks. + +This feature is particularly valuable when using brokers with visibility timeout +mechanisms, such as Redis or SQS. It allows the worker enough time to re-queue +tasks that were not completed before exiting, preventing task loss during worker +shutdown. + +See :ref:`worker-stopping` for complete documentation on worker shutdown types. + +Configuration options: + +- :setting:`worker_soft_shutdown_timeout`: Sets the duration in seconds for the soft + shutdown period (default: ``0.0``, disabled) +- :setting:`worker_enable_soft_shutdown_on_idle`: Controls whether soft shutdown + should be enabled even when the worker is idle (default: ``False``) + +Pydantic Support +---------------- - pip install "celery[pydantic]" +New native support for Pydantic models in tasks. This integration allows you to +leverage Pydantic's powerful data validation and serialization capabilities directly +in your Celery tasks. -You can use `Pydantic `_ to validate and convert arguments as well as serializing -results based on typehints by passing ``pydantic=True``. For example: +Example usage: .. code-block:: python from pydantic import BaseModel + from celery import Celery + + app = Celery('tasks') class ArgModel(BaseModel): value: int @@ -253,49 +288,73 @@ results based on typehints by passing ``pydantic=True``. 
For example: # The returned model will be converted to a dict automatically return ReturnModel(value=f"example: {arg.value}") -The task can then be called using a dict matching the model, and you'll receive -the returned model "dumped" (serialized using ``BaseModel.model_dump()``): +See :ref:`task-pydantic` for complete documentation. -.. code-block:: python +Configuration options: - >>> result = x.delay({'value': 1}) - >>> result.get(timeout=1) - {'value': 'example: 1'} +- ``pydantic=True``: Enables Pydantic integration for the task +- ``pydantic_strict=True/False``: Controls whether strict validation is enabled + (default: ``False``) +- ``pydantic_context={...}``: Provides additional context for validation +- ``pydantic_dump_kwargs={...}``: Customizes serialization behavior -There are a few more options influencing Pydantic behavior: +Google Pub/Sub Transport +------------------------ -.. attribute:: Task.pydantic_strict +New support for Google Cloud Pub/Sub as a message transport, expanding Celery's +cloud integration options. - By default, `strict mode `_ - is enabled. You can pass ``False`` to disable strict model validation. +See :ref:`broker-gcpubsub` for complete documentation. -.. attribute:: Task.pydantic_context +For the Google Pub/Sub support you have to install additional dependencies: - Pass `additional validation context - `_ during - Pydantic model validation. The context already includes the application object as - ``celery_app`` and the task name as ``celery_task_name`` by default. +.. code-block:: console -.. attribute:: Task.pydantic_dump_kwargs + $ pip install "celery[gcpubsub]" - When serializing a result, pass these additional arguments to ``dump_kwargs()``. - By default, only ``mode='json'`` is passed. +Then configure your Celery application to use the Google Pub/Sub transport: -Quorum Queues Initial Support ------------------------------ +.. code-block:: python + + broker_url = 'gcpubsub://projects/project-id' -This release introduces the initial support for Quorum Queues with Celery. -See the documentation for :ref:`using-quorum-queues` for more details. +Python 3.13 Support +------------------- -In addition, you can read about the new configuration options relevant for this feature: +Official support for Python 3.13. All core dependencies have been updated to +ensure compatibility, including Kombu and py-amqp. -- :setting:`task_default_queue_type` -- :setting:`worker_detect_quorum_queues` -- :setting:`broker_native_delayed_delivery_queue_type` +This release maintains compatibility with Python 3.8 through 3.13, as well as +PyPy 3.10+. -REMAP_SIGTERM -------------- +REMAP_SIGTERM Support +--------------------- -The REMAP_SIGTERM "hidden feature" has been tested, :ref:`documented ` and is now officially supported. -This feature allows users to remap the SIGTERM signal to SIGQUIT, to initiate a soft or a cold shutdown using TERM +The "REMAP_SIGTERM" feature, previously undocumented, has been tested, documented, +and is now officially supported. This feature allows you to remap the SIGTERM +signal to SIGQUIT, enabling you to initiate a soft or cold shutdown using TERM instead of QUIT. + +This is particularly useful in containerized environments where SIGTERM is the +standard signal for graceful termination. + +See :ref:`Cold Shutdown documentation ` for more info. + +To enable this feature, set the environment variable: + +.. 
code-block:: bash + + export REMAP_SIGTERM="SIGQUIT" + +Database Backend Improvements +---------------------------- + +New ``create_tables_at_setup`` option for the database backend. This option +controls when database tables are created, allowing for non-lazy table creation. + +By default (``create_tables_at_setup=True``), tables are created during backend +initialization. Setting this to ``False`` defers table creation until they are +actually needed, which can be useful in certain deployment scenarios where you want +more control over database schema management. + +See :ref:`conf-database-result-backend` for complete documentation. diff --git a/docs/includes/introduction.txt b/docs/includes/introduction.txt index e6dba2738df..ca2f84e8f5e 100644 --- a/docs/includes/introduction.txt +++ b/docs/includes/introduction.txt @@ -1,4 +1,4 @@ -:Version: 5.5.0rc5 (immunity) +:Version: 5.5.0 (immunity) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ From f110e3c797df36e8b3efb40449b028664c88f6ea Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Fri, 4 Apr 2025 03:38:45 +0300 Subject: [PATCH 0991/1051] Fixed "AttributeError: list object has no attribute strip" with quorum queues and failover brokers (#9657) --- .gitignore | 1 + celery/worker/consumer/delayed_delivery.py | 23 ++++++--- t/unit/worker/test_native_delayed_delivery.py | 50 +++++++++++-------- 3 files changed, 48 insertions(+), 26 deletions(-) diff --git a/.gitignore b/.gitignore index 02c9965790a..677430265ab 100644 --- a/.gitignore +++ b/.gitignore @@ -38,3 +38,4 @@ integration-tests-config.json statefilename.* dump.rdb .env +junit.xml diff --git a/celery/worker/consumer/delayed_delivery.py b/celery/worker/consumer/delayed_delivery.py index d7cacd08068..7a39c60f090 100644 --- a/celery/worker/consumer/delayed_delivery.py +++ b/celery/worker/consumer/delayed_delivery.py @@ -3,7 +3,7 @@ This module provides the DelayedDelivery bootstep which handles setup and configuration of native delayed delivery functionality when using quorum queues. """ -from typing import Optional, Set, ValuesView +from typing import List, Optional, Set, Union, ValuesView from kombu import Connection, Queue from kombu.transport.native_delayed_delivery import (bind_queue_to_native_delayed_delivery_exchange, @@ -195,22 +195,33 @@ def _validate_configuration(self, app: Celery) -> None: # Validate queue type self._validate_queue_type(app.conf.broker_native_delayed_delivery_queue_type) - def _validate_broker_urls(self, urls: str) -> Set[str]: + def _validate_broker_urls(self, broker_urls: Union[str, List[str]]) -> Set[str]: """Validate and split broker URLs. 
Args: - urls: Semicolon-separated broker URLs + broker_urls: Broker URLs, either as a semicolon-separated string + or as a list of strings Returns: Set of valid broker URLs Raises: - ValueError: If no valid broker URLs are found + ValueError: If no valid broker URLs are found or if invalid URLs are provided """ - if not urls or not urls.strip(): + if not broker_urls: raise ValueError("broker_url configuration is empty") - valid_urls = {url.strip() for url in urls.split(';') if url.strip()} + if isinstance(broker_urls, str): + brokers = broker_urls.split(";") + elif isinstance(broker_urls, list): + if not all(isinstance(url, str) for url in broker_urls): + raise ValueError("All broker URLs must be strings") + brokers = broker_urls + else: + raise ValueError(f"broker_url must be a string or list, got {broker_urls!r}") + + valid_urls = {url for url in brokers} + if not valid_urls: raise ValueError("No valid broker URLs found in configuration") diff --git a/t/unit/worker/test_native_delayed_delivery.py b/t/unit/worker/test_native_delayed_delivery.py index fecdb514fa9..bb1c98b3887 100644 --- a/t/unit/worker/test_native_delayed_delivery.py +++ b/t/unit/worker/test_native_delayed_delivery.py @@ -9,7 +9,7 @@ class test_DelayedDelivery: @patch('celery.worker.consumer.delayed_delivery.detect_quorum_queues', return_value=[False, ""]) - def test_include_if_no_quorum_queues_detected(self, detect_quorum_queues): + def test_include_if_no_quorum_queues_detected(self, _): consumer_mock = Mock() delayed_delivery = DelayedDelivery(consumer_mock) @@ -17,7 +17,7 @@ def test_include_if_no_quorum_queues_detected(self, detect_quorum_queues): assert delayed_delivery.include_if(consumer_mock) is False @patch('celery.worker.consumer.delayed_delivery.detect_quorum_queues', return_value=[True, ""]) - def test_include_if_quorum_queues_detected(self, detect_quorum_queues): + def test_include_if_quorum_queues_detected(self, _): consumer_mock = Mock() delayed_delivery = DelayedDelivery(consumer_mock) @@ -74,26 +74,36 @@ def test_start_native_delayed_delivery_fanout_exchange(self, caplog): assert len(caplog.records) == 0 - def test_validate_broker_urls_empty(self): + @pytest.mark.parametrize( + "broker_urls, expected_result", + [ + ("amqp://", {"amqp://"}), + ("amqp://;redis://", {"amqp://", "redis://"}), + ( + ["amqp://", "redis://", "sqs://"], + {"amqp://", "redis://", "sqs://"}, + ), + ], + ) + def test_validate_broker_urls_valid(self, broker_urls, expected_result): delayed_delivery = DelayedDelivery(Mock()) - - with pytest.raises(ValueError, match="broker_url configuration is empty"): - delayed_delivery._validate_broker_urls("") - - with pytest.raises(ValueError, match="broker_url configuration is empty"): - delayed_delivery._validate_broker_urls(None) - - def test_validate_broker_urls_invalid(self): + urls = delayed_delivery._validate_broker_urls(broker_urls) + assert urls == expected_result + + @pytest.mark.parametrize( + "broker_urls, exception_type, exception_match", + [ + ("", ValueError, "broker_url configuration is empty"), + (None, ValueError, "broker_url configuration is empty"), + ([], ValueError, "broker_url configuration is empty"), + (123, ValueError, "broker_url must be a string or list"), + (["amqp://", 123, None, "amqp://"], ValueError, "All broker URLs must be strings"), + ], + ) + def test_validate_broker_urls_invalid(self, broker_urls, exception_type, exception_match): delayed_delivery = DelayedDelivery(Mock()) - - with pytest.raises(ValueError, match="No valid broker URLs found in configuration"): - 
delayed_delivery._validate_broker_urls(" ; ; ") - - def test_validate_broker_urls_valid(self): - delayed_delivery = DelayedDelivery(Mock()) - - urls = delayed_delivery._validate_broker_urls("amqp://localhost;amqp://remote") - assert urls == {"amqp://localhost", "amqp://remote"} + with pytest.raises(exception_type, match=exception_match): + delayed_delivery._validate_broker_urls(broker_urls) def test_validate_queue_type_empty(self): delayed_delivery = DelayedDelivery(Mock()) From d643c7cc5dc79ee4aa1e7f9c2e409395642f4e31 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 8 Apr 2025 01:18:06 +0300 Subject: [PATCH 0992/1051] Prepare for release: v5.5.1 (#9660) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Bump version: 5.5.0 → 5.5.1 * Added Changelog for v5.5.1 --- .bumpversion.cfg | 2 +- Changelog.rst | 14 ++++++++++++++ README.rst | 2 +- celery/__init__.py | 2 +- docs/history/changelog-5.5.rst | 14 ++++++++++++++ docs/includes/introduction.txt | 2 +- 6 files changed, 32 insertions(+), 4 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 0f6b53cfb9f..0ad246fcc68 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 5.5.0 +current_version = 5.5.1 commit = True tag = True parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P[a-z\d]+)? diff --git a/Changelog.rst b/Changelog.rst index d1c26827287..8ac777d1e34 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -8,6 +8,20 @@ This document contains change notes for bugfix & new features in the main branch & 5.5.x series, please see :ref:`whatsnew-5.5` for an overview of what's new in Celery 5.5. +.. _version-5.5.1: + +5.5.1 +===== + +:release-date: 2025-04-08 +:release-by: Tomer Nosrati + +What's Changed +~~~~~~~~~~~~~~ + +- Fixed "AttributeError: list object has no attribute strip" with quorum queues and failover brokers (#9657) +- Prepare for release: v5.5.1 (#9660) + .. _version-5.5.0: 5.5.0 diff --git a/README.rst b/README.rst index f55d7393251..85d13aba959 100644 --- a/README.rst +++ b/README.rst @@ -2,7 +2,7 @@ |build-status| |coverage| |license| |wheel| |semgrep| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge| -:Version: 5.5.0 (immunity) +:Version: 5.5.1 (immunity) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ diff --git a/celery/__init__.py b/celery/__init__.py index 2b2459633c0..313bc3a68ad 100644 --- a/celery/__init__.py +++ b/celery/__init__.py @@ -17,7 +17,7 @@ SERIES = 'immunity' -__version__ = '5.5.0' +__version__ = '5.5.1' __author__ = 'Ask Solem' __contact__ = 'auvipy@gmail.com' __homepage__ = 'https://docs.celeryq.dev/' diff --git a/docs/history/changelog-5.5.rst b/docs/history/changelog-5.5.rst index 4d8c1a8c147..98668d731da 100644 --- a/docs/history/changelog-5.5.rst +++ b/docs/history/changelog-5.5.rst @@ -8,6 +8,20 @@ This document contains change notes for bugfix & new features in the main branch & 5.5.x series, please see :ref:`whatsnew-5.5` for an overview of what's new in Celery 5.5. +.. _version-5.5.1: + +5.5.1 +===== + +:release-date: 2025-04-08 +:release-by: Tomer Nosrati + +What's Changed +~~~~~~~~~~~~~~ + +- Fixed "AttributeError: list object has no attribute strip" with quorum queues and failover brokers (#9657) +- Prepare for release: v5.5.1 (#9660) + .. 
_version-5.5.0: 5.5.0 diff --git a/docs/includes/introduction.txt b/docs/includes/introduction.txt index ca2f84e8f5e..308f69e3aa8 100644 --- a/docs/includes/introduction.txt +++ b/docs/includes/introduction.txt @@ -1,4 +1,4 @@ -:Version: 5.5.0 (immunity) +:Version: 5.5.1 (immunity) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ From f0c726121468f4368cc6d149a6370900d1b0dbf6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ignacio=20Mart=C3=ADnez=20Rivera?= Date: Fri, 18 Apr 2025 00:49:41 +0200 Subject: [PATCH 0993/1051] Fix calculating remaining time across DST changes (#9669) --- celery/utils/time.py | 22 +++++++++++++--------- t/unit/utils/test_time.py | 35 +++++++++++++++++++++++++++++++++++ 2 files changed, 48 insertions(+), 9 deletions(-) diff --git a/celery/utils/time.py b/celery/utils/time.py index 014bc39b22d..2376bb3b71d 100644 --- a/celery/utils/time.py +++ b/celery/utils/time.py @@ -204,7 +204,7 @@ def delta_resolution(dt: datetime, delta: timedelta) -> datetime: def remaining( start: datetime, ends_in: timedelta, now: Callable | None = None, relative: bool = False) -> timedelta: - """Calculate the remaining time for a start date and a timedelta. + """Calculate the real remaining time for a start date and a timedelta. For example, "how many seconds left for 30 seconds after start?" @@ -221,18 +221,22 @@ def remaining( ~datetime.timedelta: Remaining time. """ now = now or datetime.now(datetime_timezone.utc) - if str( - start.tzinfo) == str( - now.tzinfo) and now.utcoffset() != start.utcoffset(): - # DST started/ended - start = start.replace(tzinfo=now.tzinfo) end_date = start + ends_in if relative: end_date = delta_resolution(end_date, ends_in).replace(microsecond=0) - ret = end_date - now + + # Using UTC to calculate real time difference. + # Python by default uses wall time in arithmetic between datetimes with + # equal non-UTC timezones. + now_utc = now.astimezone(timezone.utc) + end_date_utc = end_date.astimezone(timezone.utc) + ret = end_date_utc - now_utc if C_REMDEBUG: # pragma: no cover - print('rem: NOW:{!r} START:{!r} ENDS_IN:{!r} END_DATE:{} REM:{}'.format( - now, start, ends_in, end_date, ret)) + print( + 'rem: NOW:{!r} NOW_UTC:{!r} START:{!r} ENDS_IN:{!r} ' + 'END_DATE:{} END_DATE_UTC:{!r} REM:{}'.format( + now, now_utc, start, ends_in, end_date, end_date_utc, ret) + ) return ret diff --git a/t/unit/utils/test_time.py b/t/unit/utils/test_time.py index 621769252a9..3afde66888f 100644 --- a/t/unit/utils/test_time.py +++ b/t/unit/utils/test_time.py @@ -177,6 +177,41 @@ def test_remaining(): next_run = now + rem_time assert next_run == next_actual_time + """ + Case 4: DST check between now and next_run + Suppose start (which is last_run_time) and now are in EST while next_run + is in EDT, then check that the remaining time returned is the exact real + time difference (not wall time). + For example, between + 2019-03-10 01:30:00-05:00 and + 2019-03-10 03:30:00-04:00 + There is only 1 hour difference in real time, but 2 on wall time. + Python by default uses wall time in arithmetic between datetimes with + equal non-UTC timezones. 
+ In 2019, DST starts on March 10 + """ + start = datetime( + day=10, month=3, year=2019, hour=1, + minute=30, tzinfo=eastern_tz) # EST + + now = datetime( + day=10, month=3, year=2019, hour=1, + minute=30, tzinfo=eastern_tz) # EST + delta = ffwd(hour=3, year=2019, microsecond=0, minute=30, + second=0, day=10, weeks=0, month=3) + # `next_actual_time` is the next time to run (derived from delta) + next_actual_time = datetime( + day=10, month=3, year=2019, hour=3, minute=30, tzinfo=eastern_tz) # EDT + assert start.tzname() == "EST" + assert now.tzname() == "EST" + assert next_actual_time.tzname() == "EDT" + rem_time = remaining(start, delta, now) + assert rem_time.total_seconds() == 3600 + next_run_utc = now.astimezone(ZoneInfo("UTC")) + rem_time + next_run_edt = next_run_utc.astimezone(eastern_tz) + assert next_run_utc == next_actual_time + assert next_run_edt == next_actual_time + class test_timezone: From 1aabeecea57f63802f49d43b6cbd11fb3c33a5f0 Mon Sep 17 00:00:00 2001 From: Rick Harris Date: Thu, 17 Apr 2025 20:00:05 -0500 Subject: [PATCH 0994/1051] Remove `setup_logger` from COMPAT_MODULES (#9668) In commit 5a0c4585, the deprecated `log.setup_logger` method was removed; however `COMPAT_MODULES` didn't receive the requisite update. The issue doesn't really manifest itself in normal usage of Celery. It was only when using `gc.get_objects()` that it triggered a `repr` in Celery to fail. File "/python3.12/site-packages/celery/local.py", line 121, in __repr__ obj = self._get_current_object() ^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/python3.12/site-packages/celery/local.py", line 105, in _get_current_object return loc(*self.__args, **self.__kwargs) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/python3.12/site-packages/celery/local.py", line 390, in getappattr return current_app._rgetattr(path) ^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/python3.12/site-packages/celery/app/base.py", line 1245, in _rgetattr return attrgetter(path)(self) ^^^^^^^^^^^^^^^^^^^^^^ AttributeError: 'Logging' object has no attribute 'setup_logger' --- celery/local.py | 1 - 1 file changed, 1 deletion(-) diff --git a/celery/local.py b/celery/local.py index 7bbe6151de2..34eafff3482 100644 --- a/celery/local.py +++ b/celery/local.py @@ -397,7 +397,6 @@ def getappattr(path): }, 'log': { 'get_default_logger': 'log.get_default_logger', - 'setup_logger': 'log.setup_logger', 'setup_logging_subsystem': 'log.setup_logging_subsystem', 'redirect_stdouts_to_logger': 'log.redirect_stdouts_to_logger', }, From 2287801006accfa0bba5959bfd2143a6825622d7 Mon Sep 17 00:00:00 2001 From: Josh Walden <42008427+jabberwock404@users.noreply.github.com> Date: Mon, 21 Apr 2025 18:05:17 -0400 Subject: [PATCH 0995/1051] fix mongodb bullet and fix contribution github links (#9672) --- CONTRIBUTING.rst | 4 ++-- docs/userguide/configuration.rst | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index ef6b4ba90a4..1f7e665a6ef 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -445,10 +445,10 @@ fetch and checkout a remote branch like this:: **Note:** Any feature or fix branch should be created from ``upstream/main``. -.. _`Fork a Repo`: https://help.github.com/fork-a-repo/ +.. _`Fork a Repo`: https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/working-with-forks/fork-a-repo .. _`Rebasing merge commits in git`: https://web.archive.org/web/20150627054345/http://marketblog.envato.com/general/rebasing-merge-commits-in-git/ -.. _`Rebase`: https://help.github.com/rebase/ +.. 
_`Rebase`: https://docs.github.com/en/get-started/using-git/about-git-rebase .. _contributing-docker-development: diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst index 56521e0400c..58f7e7f19d5 100644 --- a/docs/userguide/configuration.rst +++ b/docs/userguide/configuration.rst @@ -686,7 +686,7 @@ Can be one of the following: Use `Memcached`_ to store the results. See :ref:`conf-cache-result-backend`. -*``mongodb`` +* ``mongodb`` Use `MongoDB`_ to store the results. See :ref:`conf-mongodb-result-backend`. From e8adf7f0b4b46a710a96448a9a2f40ace03b4533 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Fri, 25 Apr 2025 23:09:08 +0300 Subject: [PATCH 0996/1051] Prepare for release: v5.5.2 (#9675) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Bump version: 5.5.1 → 5.5.2 * Added Changelog for v5.5.2 --- .bumpversion.cfg | 2 +- Changelog.rst | 36 ++++++++++++++++++++++++---------- README.rst | 2 +- celery/__init__.py | 2 +- docs/history/changelog-5.5.rst | 34 +++++++++++++++++++++++--------- docs/includes/introduction.txt | 2 +- 6 files changed, 55 insertions(+), 23 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 0ad246fcc68..0274e41ebea 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 5.5.1 +current_version = 5.5.2 commit = True tag = True parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P[a-z\d]+)? diff --git a/Changelog.rst b/Changelog.rst index 8ac777d1e34..25847891cee 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -8,6 +8,22 @@ This document contains change notes for bugfix & new features in the main branch & 5.5.x series, please see :ref:`whatsnew-5.5` for an overview of what's new in Celery 5.5. +.. _version-5.5.2: + +5.5.2 +===== + +:release-date: 2025-04-25 +:release-by: Tomer Nosrati + +What's Changed +~~~~~~~~~~~~~~ + +- Fix calculating remaining time across DST changes (#9669) +- Remove `setup_logger` from COMPAT_MODULES (#9668) +- Fix mongodb bullet and fix github links in contributions section (#9672) +- Prepare for release: v5.5.2 (#9675) + .. _version-5.5.1: 5.5.1 @@ -632,7 +648,7 @@ Relevant Issues: Quorum Queues Initial Support ----------------------------- -This release introduces the initial support for Quorum Queues with Celery. +This release introduces the initial support for Quorum Queues with Celery. See new configuration options for more details: @@ -795,7 +811,7 @@ Relevant Issues: Quorum Queues Initial Support ----------------------------- -This release introduces the initial support for Quorum Queues with Celery. +This release introduces the initial support for Quorum Queues with Celery. See new configuration options for more details: @@ -925,7 +941,7 @@ Relevant Issues: Quorum Queues Initial Support ----------------------------- -This release introduces the initial support for Quorum Queues with Celery. +This release introduces the initial support for Quorum Queues with Celery. See new configuration options for more details: @@ -1073,7 +1089,7 @@ Relevant Issues: Quorum Queues Initial Support ----------------------------- -This release introduces the initial support for Quorum Queues with Celery. +This release introduces the initial support for Quorum Queues with Celery. See new configuration options for more details: @@ -1204,7 +1220,7 @@ Relevant Issues: Quorum Queues Initial Support ----------------------------- -This release introduces the initial support for Quorum Queues with Celery. 
+This release introduces the initial support for Quorum Queues with Celery. See new configuration options for more details: @@ -1317,7 +1333,7 @@ Relevant Issues: Quorum Queues Initial Support ----------------------------- -This release introduces the initial support for Quorum Queues with Celery. +This release introduces the initial support for Quorum Queues with Celery. See new configuration options for more details: @@ -1453,7 +1469,7 @@ Relevant Issues: Quorum Queues Initial Support ----------------------------- -This release introduces the initial support for Quorum Queues with Celery. +This release introduces the initial support for Quorum Queues with Celery. See new configuration options for more details: @@ -1536,7 +1552,7 @@ Relevant Issues: Quorum Queues Initial Support ----------------------------- -This release introduces the initial support for Quorum Queues with Celery. +This release introduces the initial support for Quorum Queues with Celery. See new configuration options for more details: @@ -1595,7 +1611,7 @@ Relevant Issues: Quorum Queues Initial Support ----------------------------- -This release introduces the initial support for Quorum Queues with Celery. +This release introduces the initial support for Quorum Queues with Celery. See new configuration options for more details: @@ -1872,7 +1888,7 @@ The official release is planned for March-April 2024. :release-date: 2023-11-22 9:15 P.M GMT+6 :release-by: Asif Saif Uddin -This release is focused mainly to fix AWS SQS new feature comatibility issue and old regressions. +This release is focused mainly to fix AWS SQS new feature comatibility issue and old regressions. The code changes are mostly fix for regressions. More details can be found below. - Increased docker-build CI job timeout from 30m -> 60m (#8635) diff --git a/README.rst b/README.rst index 85d13aba959..d16d5500cdc 100644 --- a/README.rst +++ b/README.rst @@ -2,7 +2,7 @@ |build-status| |coverage| |license| |wheel| |semgrep| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge| -:Version: 5.5.1 (immunity) +:Version: 5.5.2 (immunity) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ diff --git a/celery/__init__.py b/celery/__init__.py index 313bc3a68ad..6e8e714eede 100644 --- a/celery/__init__.py +++ b/celery/__init__.py @@ -17,7 +17,7 @@ SERIES = 'immunity' -__version__ = '5.5.1' +__version__ = '5.5.2' __author__ = 'Ask Solem' __contact__ = 'auvipy@gmail.com' __homepage__ = 'https://docs.celeryq.dev/' diff --git a/docs/history/changelog-5.5.rst b/docs/history/changelog-5.5.rst index 98668d731da..665e0e4238c 100644 --- a/docs/history/changelog-5.5.rst +++ b/docs/history/changelog-5.5.rst @@ -8,6 +8,22 @@ This document contains change notes for bugfix & new features in the main branch & 5.5.x series, please see :ref:`whatsnew-5.5` for an overview of what's new in Celery 5.5. +.. _version-5.5.2: + +5.5.2 +===== + +:release-date: 2025-04-25 +:release-by: Tomer Nosrati + +What's Changed +~~~~~~~~~~~~~~ + +- Fix calculating remaining time across DST changes (#9669) +- Remove `setup_logger` from COMPAT_MODULES (#9668) +- Fix mongodb bullet and fix github links in contributions section (#9672) +- Prepare for release: v5.5.2 (#9675) + .. _version-5.5.1: 5.5.1 @@ -632,7 +648,7 @@ Relevant Issues: Quorum Queues Initial Support ----------------------------- -This release introduces the initial support for Quorum Queues with Celery. 
+This release introduces the initial support for Quorum Queues with Celery. See new configuration options for more details: @@ -795,7 +811,7 @@ Relevant Issues: Quorum Queues Initial Support ----------------------------- -This release introduces the initial support for Quorum Queues with Celery. +This release introduces the initial support for Quorum Queues with Celery. See new configuration options for more details: @@ -925,7 +941,7 @@ Relevant Issues: Quorum Queues Initial Support ----------------------------- -This release introduces the initial support for Quorum Queues with Celery. +This release introduces the initial support for Quorum Queues with Celery. See new configuration options for more details: @@ -1073,7 +1089,7 @@ Relevant Issues: Quorum Queues Initial Support ----------------------------- -This release introduces the initial support for Quorum Queues with Celery. +This release introduces the initial support for Quorum Queues with Celery. See new configuration options for more details: @@ -1204,7 +1220,7 @@ Relevant Issues: Quorum Queues Initial Support ----------------------------- -This release introduces the initial support for Quorum Queues with Celery. +This release introduces the initial support for Quorum Queues with Celery. See new configuration options for more details: @@ -1317,7 +1333,7 @@ Relevant Issues: Quorum Queues Initial Support ----------------------------- -This release introduces the initial support for Quorum Queues with Celery. +This release introduces the initial support for Quorum Queues with Celery. See new configuration options for more details: @@ -1453,7 +1469,7 @@ Relevant Issues: Quorum Queues Initial Support ----------------------------- -This release introduces the initial support for Quorum Queues with Celery. +This release introduces the initial support for Quorum Queues with Celery. See new configuration options for more details: @@ -1536,7 +1552,7 @@ Relevant Issues: Quorum Queues Initial Support ----------------------------- -This release introduces the initial support for Quorum Queues with Celery. +This release introduces the initial support for Quorum Queues with Celery. See new configuration options for more details: @@ -1595,7 +1611,7 @@ Relevant Issues: Quorum Queues Initial Support ----------------------------- -This release introduces the initial support for Quorum Queues with Celery. +This release introduces the initial support for Quorum Queues with Celery. 
See new configuration options for more details: diff --git a/docs/includes/introduction.txt b/docs/includes/introduction.txt index 308f69e3aa8..94539b5f2cd 100644 --- a/docs/includes/introduction.txt +++ b/docs/includes/introduction.txt @@ -1,4 +1,4 @@ -:Version: 5.5.1 (immunity) +:Version: 5.5.2 (immunity) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ From 18329c3d85b1aa7da8fc3c7fc7391596ce5f1159 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Sun, 27 Apr 2025 16:30:28 +0600 Subject: [PATCH 0997/1051] make the tests run on python 3.13 for gcs backend (#9677) * make the tests run on python 3.13 for gcs backend * remove unused import of sys --- t/unit/backends/test_gcs.py | 13 +------------ 1 file changed, 1 insertion(+), 12 deletions(-) diff --git a/t/unit/backends/test_gcs.py b/t/unit/backends/test_gcs.py index 32e10659136..fdb4df692a4 100644 --- a/t/unit/backends/test_gcs.py +++ b/t/unit/backends/test_gcs.py @@ -1,24 +1,13 @@ -import sys from datetime import datetime, timedelta from unittest.mock import MagicMock, Mock, call, patch import pytest from google.cloud.exceptions import NotFound +from celery.backends.gcs import GCSBackend from celery.exceptions import ImproperlyConfigured -# Workaround until python-firestore is fixed -is_py313 = sys.version_info >= (3, 13) -if not is_py313: - from celery.backends.gcs import GCSBackend -else: - GCSBackend = None - -@pytest.mark.skipif( - is_py313, - reason="https://github.com/googleapis/python-firestore/issues/973", -) class test_GCSBackend: def setup_method(self): self.app.conf.gcs_bucket = 'bucket' From bbe86be3f5f1d50ac777c7cd40679e0d92949b8b Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 5 May 2025 03:45:15 +0300 Subject: [PATCH 0998/1051] Added DeepWiki to README (#9683) --- README.rst | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/README.rst b/README.rst index d16d5500cdc..65dca86b8a6 100644 --- a/README.rst +++ b/README.rst @@ -6,6 +6,7 @@ :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ +:DeepWiki: |deepwiki| :Keywords: task, queue, job, async, rabbitmq, amqp, redis, python, distributed, actors @@ -584,3 +585,8 @@ file in the top distribution directory for the full license text. .. |downloads| image:: https://pepy.tech/badge/celery :alt: Downloads :target: https://pepy.tech/project/celery + +.. 
|deepwiki| image:: https://devin.ai/assets/deepwiki-badge.png
+  :alt: Ask http://DeepWiki.com
+  :target: https://deepwiki.com/celery/celery
+  :width: 125px

From 8d0a9fd4e37369d37a0d118147b691cc731285db Mon Sep 17 00:00:00 2001
From: Tomer Nosrati
Date: Wed, 7 May 2025 22:11:24 +0300
Subject: [PATCH 0999/1051] Limit redis to <=v5.2.1 to match Kombu (#9693)

---
 requirements/extras/redis.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements/extras/redis.txt b/requirements/extras/redis.txt
index 35731b915b4..73916a5f236 100644
--- a/requirements/extras/redis.txt
+++ b/requirements/extras/redis.txt
@@ -1 +1 @@
-redis>=4.5.2,<6.0.0,!=4.5.5
+redis>=4.5.2,<=5.2.1,!=4.5.5

From e355132111747e4420f83994374e877fbe46f36f Mon Sep 17 00:00:00 2001
From: Tomer Nosrati
Date: Thu, 8 May 2025 00:30:51 +0300
Subject: [PATCH 1000/1051] Use EX_OK instead of literal zero (#9684)

Co-authored-by: Asif Saif Uddin

---
 celery/worker/control.py      | 3 ++-
 t/unit/worker/test_control.py | 3 ++-
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/celery/worker/control.py b/celery/worker/control.py
index 8cbd92cbd0e..8f9fc4f92ba 100644
--- a/celery/worker/control.py
+++ b/celery/worker/control.py
@@ -7,6 +7,7 @@ from kombu.utils.encoding import safe_repr

 from celery.exceptions import WorkerShutdown
+from celery.platforms import EX_OK
 from celery.platforms import signals as _signals
 from celery.utils.functional import maybe_list
 from celery.utils.log import get_logger
@@ -580,7 +581,7 @@ def autoscale(state, max=None, min=None):
 def shutdown(state, msg='Got shutdown from remote', **kwargs):
     """Shutdown worker(s)."""
     logger.warning(msg)
-    raise WorkerShutdown(0)
+    raise WorkerShutdown(EX_OK)


 # -- Queues

diff --git a/t/unit/worker/test_control.py b/t/unit/worker/test_control.py
index 877bc82c4b6..6d7e923d2db 100644
--- a/t/unit/worker/test_control.py
+++ b/t/unit/worker/test_control.py
@@ -669,8 +669,9 @@ def test_ping(self):

     def test_shutdown(self):
         m = {'method': 'shutdown', 'destination': hostname}
-        with pytest.raises(SystemExit):
+        with pytest.raises(SystemExit) as excinfo:
             self.panel.handle_message(m, None)
+        assert excinfo.value.code == 0

     def test_panel_reply(self):

From b24c8194ab7414045017d45b6947dcf9521f46ad Mon Sep 17 00:00:00 2001
From: Colin Watson
Date: Wed, 7 May 2025 22:35:23 +0100
Subject: [PATCH 1001/1051] Make wheel metadata reproducible (#9687)

The various `Provides-Extra` sections in the built wheel's `METADATA` file
were shuffled arbitrarily between different builds, which made it more
difficult to detect other reproducibility issues (see
https://reproducible-builds.org/).

There doesn't seem to be any reason for `EXTENSIONS` here to be a set; a
tuple will work just as well, and adds an ordering guarantee.
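
A quick way to see the ordering hazard (a hypothetical demo script, not part
of this patch; the extras names below are illustrative):

    # demo_extras_order.py -- hedged sketch of why a set is non-reproducible.
    # String hashing is randomized per process, so set iteration order can
    # change between builds; a tuple always iterates in declaration order.
    extras_set = {'redis', 'msgpack', 'yaml', 'sqs'}
    extras_tuple = ('redis', 'msgpack', 'yaml', 'sqs')

    # Run twice, e.g. with PYTHONHASHSEED=1 and then PYTHONHASHSEED=2:
    # the set listing may differ between runs, the tuple listing never does.
    print('set  :', ['Provides-Extra: %s' % e for e in extras_set])
    print('tuple:', ['Provides-Extra: %s' % e for e in extras_tuple])
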
--- setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index b78932ea597..d5d68c2e772 100755 --- a/setup.py +++ b/setup.py @@ -9,7 +9,7 @@ # -*- Extras -*- -EXTENSIONS = { +EXTENSIONS = ( 'arangodb', 'auth', 'azureblockblob', @@ -43,7 +43,7 @@ 'yaml', 'zookeeper', 'zstd' -} +) # -*- Distribution Meta -*- From c556c30794bfbe0831bbad007a00e00a0ddfc960 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Thu, 8 May 2025 14:19:31 +0600 Subject: [PATCH 1002/1051] let celery install from kombu dependencies when needed for better align (#9696) --- requirements/extras/mongodb.txt | 2 +- requirements/extras/msgpack.txt | 2 +- requirements/extras/redis.txt | 2 +- requirements/extras/sqlalchemy.txt | 2 +- requirements/extras/sqs.txt | 2 +- requirements/extras/yaml.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/requirements/extras/mongodb.txt b/requirements/extras/mongodb.txt index 393740b77b3..ad8da779cd0 100644 --- a/requirements/extras/mongodb.txt +++ b/requirements/extras/mongodb.txt @@ -1 +1 @@ -pymongo==4.10.1 +kombu[mongodb] diff --git a/requirements/extras/msgpack.txt b/requirements/extras/msgpack.txt index a9fdf042422..7353b6a1bc1 100644 --- a/requirements/extras/msgpack.txt +++ b/requirements/extras/msgpack.txt @@ -1 +1 @@ -msgpack==1.1.0 +kombu[msgpack] diff --git a/requirements/extras/redis.txt b/requirements/extras/redis.txt index 73916a5f236..db8e01d0d2f 100644 --- a/requirements/extras/redis.txt +++ b/requirements/extras/redis.txt @@ -1 +1 @@ -redis>=4.5.2,<=5.2.1,!=4.5.5 +kombu[redis] diff --git a/requirements/extras/sqlalchemy.txt b/requirements/extras/sqlalchemy.txt index 1e8fb62d436..5e31674d2d0 100644 --- a/requirements/extras/sqlalchemy.txt +++ b/requirements/extras/sqlalchemy.txt @@ -1 +1 @@ -sqlalchemy>=1.4.48,<2.1 +kombu[sqlalchemy] diff --git a/requirements/extras/sqs.txt b/requirements/extras/sqs.txt index 4160a304451..a7be017ff2f 100644 --- a/requirements/extras/sqs.txt +++ b/requirements/extras/sqs.txt @@ -1,3 +1,3 @@ boto3>=1.26.143 urllib3>=1.26.16 -kombu[sqs]>=5.3.4 +kombu[sqs]>=5.5.0 diff --git a/requirements/extras/yaml.txt b/requirements/extras/yaml.txt index 17bf7fdca15..3a80fb07098 100644 --- a/requirements/extras/yaml.txt +++ b/requirements/extras/yaml.txt @@ -1 +1 @@ -PyYAML>=3.10 +kombu[yaml] From 2914922aad904bfc292035cc5e5b295810233efe Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Thu, 8 May 2025 17:09:21 +0300 Subject: [PATCH 1003/1051] Fix stamping documentation to clarify stamped_headers key is optional in visitor methods (#9697) --- docs/userguide/canvas.rst | 30 +++++++++++++++++++++--------- 1 file changed, 21 insertions(+), 9 deletions(-) diff --git a/docs/userguide/canvas.rst b/docs/userguide/canvas.rst index 8d510a9c2a0..a39a2d65f0f 100644 --- a/docs/userguide/canvas.rst +++ b/docs/userguide/canvas.rst @@ -1259,19 +1259,25 @@ the external monitoring system, etc. def on_signature(self, sig, **headers) -> dict: return {'monitoring_id': uuid4().hex} -.. note:: +.. important:: - The ``stamped_headers`` key returned in ``on_signature`` (or any other visitor method) is used to - specify the headers that will be stamped on the task. If this key is not specified, the stamping - visitor will assume all keys in the returned dictionary are the stamped headers from the visitor. 
+ The ``stamped_headers`` key in the dictionary returned by ``on_signature()`` (or any other visitor method) is **optional**: - This means the following code block will result in the same behavior as the previous example. + .. code-block:: python -.. code-block:: python + # Approach 1: Without stamped_headers - ALL keys are treated as stamps + def on_signature(self, sig, **headers) -> dict: + return {'monitoring_id': uuid4().hex} # monitoring_id becomes a stamp - class MonitoringIdStampingVisitor(StampingVisitor): + # Approach 2: With stamped_headers - ONLY listed keys are stamps def on_signature(self, sig, **headers) -> dict: - return {'monitoring_id': uuid4().hex, 'stamped_headers': ['monitoring_id']} + return { + 'monitoring_id': uuid4().hex, # This will be a stamp + 'other_data': 'value', # This will NOT be a stamp + 'stamped_headers': ['monitoring_id'] # Only monitoring_id is stamped + } + + If the ``stamped_headers`` key is not specified, the stamping visitor will assume all keys in the returned dictionary are stamped headers. Next, let's see how to use the ``MonitoringIdStampingVisitor`` example stamping visitor. @@ -1302,18 +1308,24 @@ visitor will be applied to the callback as well. The callback must be linked to the signature before stamping. -For example, let's examine the following custom stamping visitor. +For example, let's examine the following custom stamping visitor that uses the +implicit approach where all returned dictionary keys are automatically treated as +stamped headers without explicitly specifying `stamped_headers`. .. code-block:: python class CustomStampingVisitor(StampingVisitor): def on_signature(self, sig, **headers) -> dict: + # 'header' will automatically be treated as a stamped header + # without needing to specify 'stamped_headers': ['header'] return {'header': 'value'} def on_callback(self, callback, **header) -> dict: + # 'on_callback' will automatically be treated as a stamped header return {'on_callback': True} def on_errback(self, errback, **header) -> dict: + # 'on_errback' will automatically be treated as a stamped header return {'on_errback': True} This custom stamping visitor will stamp the signature, callbacks, and errbacks with ``{'header': 'value'}`` From 5c1a13cffe0331391bf8bc808196a1573f8922ad Mon Sep 17 00:00:00 2001 From: Yonatan Bitton Date: Fri, 9 May 2025 21:33:07 +0300 Subject: [PATCH 1004/1051] Support apply_async without queue argument on quorum queues (#9686) * Reduce logic if no eta/countdown specified; Added support for calls without queue argument; Added tests * Update celery/app/base.py Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> * Revert "Update celery/app/base.py" This reverts commit e4a8b9d0742c504859f61c9d2d0153c6b7bd3a6b. 
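
For context: when quorum queues are detected, a task published with a
countdown/ETA is rerouted through the ``celery_delayed_27`` topic exchange,
using a routing key that encodes the delay. A rough sketch of that encoding,
inferred only from the expected value in the unit test added below (the real
``calculate_routing_key`` helper lives elsewhere and may differ in detail):

    # hedged sketch, reverse-engineered from the test expectation below
    def sketch_routing_key(countdown: int, routing_key: str) -> str:
        bits = format(countdown, '028b')  # 28 binary digits, most significant first
        return '.'.join(bits) + '.' + routing_key

    assert sketch_routing_key(30, 'testcelery') == (
        '0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.1.1.1.1.0.testcelery')
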
--------- Co-authored-by: Asif Saif Uddin Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- celery/app/base.py | 56 +++++++++++-------- .../test_native_delayed_delivery.py | 26 +++++++++ t/unit/app/test_app.py | 52 +++++++++++++++++ 3 files changed, 110 insertions(+), 24 deletions(-) diff --git a/celery/app/base.py b/celery/app/base.py index 5c853af70e5..a4d1c4cd8c9 100644 --- a/celery/app/base.py +++ b/celery/app/base.py @@ -833,30 +833,38 @@ def send_task(self, name, args=None, kwargs=None, countdown=None, options, route_name or name, args, kwargs, task_type) driver_type = self.producer_pool.connections.connection.transport.driver_type - is_native_delayed_delivery = detect_quorum_queues(self, driver_type)[0] - if is_native_delayed_delivery and options['queue'].exchange.type != 'direct': - if eta: - if isinstance(eta, str): - eta = isoparse(eta) - countdown = (maybe_make_aware(eta) - self.now()).total_seconds() - - if countdown: - if countdown > 0: - routing_key = calculate_routing_key(int(countdown), options["queue"].routing_key) - exchange = Exchange( - 'celery_delayed_27', - type='topic', - ) - del options['queue'] - options['routing_key'] = routing_key - options['exchange'] = exchange - elif is_native_delayed_delivery and options['queue'].exchange.type == 'direct': - logger.warning( - 'Direct exchanges are not supported with native delayed delivery.\n' - f'{options["queue"].exchange.name} is a direct exchange but should be a topic exchange or ' - 'a fanout exchange in order for native delayed delivery to work properly.\n' - 'If quorum queues are used, this task may block the worker process until the ETA arrives.' - ) + + if (eta or countdown) and detect_quorum_queues(self, driver_type)[0]: + + queue = options.get("queue") + exchange_type = queue.exchange.type if queue else options["exchange_type"] + routing_key = queue.routing_key if queue else options["routing_key"] + exchange_name = queue.exchange.name if queue else options["exchange"] + + if exchange_type != 'direct': + if eta: + if isinstance(eta, str): + eta = isoparse(eta) + countdown = (maybe_make_aware(eta) - self.now()).total_seconds() + + if countdown: + if countdown > 0: + routing_key = calculate_routing_key(int(countdown), routing_key) + exchange = Exchange( + 'celery_delayed_27', + type='topic', + ) + options.pop("queue", None) + options['routing_key'] = routing_key + options['exchange'] = exchange + + else: + logger.warning( + 'Direct exchanges are not supported with native delayed delivery.\n' + f'{exchange_name} is a direct exchange but should be a topic exchange or ' + 'a fanout exchange in order for native delayed delivery to work properly.\n' + 'If quorum queues are used, this task may block the worker process until the ETA arrives.' 
+ ) if expires is not None: if isinstance(expires, datetime): diff --git a/t/smoke/tests/quorum_queues/test_native_delayed_delivery.py b/t/smoke/tests/quorum_queues/test_native_delayed_delivery.py index f68efaa481e..dc5bbdaa8bb 100644 --- a/t/smoke/tests/quorum_queues/test_native_delayed_delivery.py +++ b/t/smoke/tests/quorum_queues/test_native_delayed_delivery.py @@ -127,6 +127,32 @@ def test_countdown(self, celery_setup: CeleryTestSetup): result.get(timeout=10) + def test_countdown__no_queue_arg(self, celery_setup: CeleryTestSetup): + task_route_function = lambda *args, **kwargs: { # noqa: E731 + "routing_key": "celery", + "exchange": "celery", + "exchange_type": "topic", + } + celery_setup.app.conf.task_routes = (task_route_function,) + s = noop.s().set() + + result = s.apply_async() + + result.get(timeout=3) + + def test_countdown__no_queue_arg__countdown(self, celery_setup: CeleryTestSetup): + task_route_function = lambda *args, **kwargs: { # noqa: E731 + "routing_key": "celery", + "exchange": "celery", + "exchange_type": "topic", + } + celery_setup.app.conf.task_routes = (task_route_function,) + s = noop.s().set() + + result = s.apply_async(countdown=5) + + result.get(timeout=10) + def test_eta(self, celery_setup: CeleryTestSetup): s = noop.s().set(queue=celery_setup.worker.worker_queue) diff --git a/t/unit/app/test_app.py b/t/unit/app/test_app.py index 9092ffaaa5c..ca2dd2b4bf1 100644 --- a/t/unit/app/test_app.py +++ b/t/unit/app/test_app.py @@ -1451,6 +1451,58 @@ def test_native_delayed_delivery_countdown(self, detect_quorum_queues): driver_type_stub = self.app.amqp.producer_pool.connections.connection.transport.driver_type detect_quorum_queues.assert_called_once_with(self.app, driver_type_stub) + @patch('celery.app.base.detect_quorum_queues', return_value=[True, "testcelery"]) + def test_native_delayed_delivery__no_queue_arg__no_eta(self, detect_quorum_queues): + self.app.amqp = MagicMock(name='amqp') + options = { + 'routing_key': 'testcelery', + 'exchange': 'testcelery', + 'exchange_type': 'topic', + } + self.app.amqp.router.route.return_value = options + + self.app.send_task( + name='foo', + args=(1, 2), + ) + self.app.amqp.send_task_message.assert_called_once_with( + ANY, + ANY, + ANY, + **options, + ) + assert not detect_quorum_queues.called + + @patch('celery.app.base.detect_quorum_queues', return_value=[True, "testcelery"]) + def test_native_delayed_delivery__no_queue_arg__with_countdown(self, detect_quorum_queues): + self.app.amqp = MagicMock(name='amqp') + options = { + 'routing_key': 'testcelery', + 'exchange': 'testcelery', + 'exchange_type': 'topic', + } + self.app.amqp.router.route.return_value = options + + self.app.send_task( + name='foo', + args=(1, 2), + countdown=30, + ) + exchange = Exchange( + 'celery_delayed_27', + type='topic', + ) + self.app.amqp.send_task_message.assert_called_once_with( + ANY, + ANY, + ANY, + exchange=exchange, + routing_key='0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.1.1.1.1.0.testcelery', + exchange_type="topic", + ) + driver_type_stub = self.app.amqp.producer_pool.connections.connection.transport.driver_type + detect_quorum_queues.assert_called_once_with(self.app, driver_type_stub) + @patch('celery.app.base.detect_quorum_queues', return_value=[True, "testcelery"]) def test_native_delayed_delivery_eta_datetime(self, detect_quorum_queues): self.app.amqp = MagicMock(name='amqp') From e93267a3fe9a6d56c9f326f64c68eaec67ef6d6f Mon Sep 17 00:00:00 2001 From: Yonatan Bitton Date: Thu, 15 May 2025 13:43:36 +0300 Subject: [PATCH 1005/1051] 
updated rabbitmq doc about using quorum queues with task routes (#9707) --- .../backends-and-brokers/rabbitmq.rst | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/docs/getting-started/backends-and-brokers/rabbitmq.rst b/docs/getting-started/backends-and-brokers/rabbitmq.rst index 4dae16877e3..2afc3fa3291 100644 --- a/docs/getting-started/backends-and-brokers/rabbitmq.rst +++ b/docs/getting-started/backends-and-brokers/rabbitmq.rst @@ -196,6 +196,21 @@ Celery supports `Quorum Queues`_ by setting the ``x-queue-type`` header to ``quo If you'd like to change the type of the default queue, set the :setting:`task_default_queue_type` setting to ``quorum``. +Another way to configure `Quorum Queues`_ is by relying on default settings and using ``task_routes``: + +.. code-block:: python + + task_default_queue_type = "quorum" + task_default_exchange_type = "topic" + task_default_queue = "my-queue" + broker_transport_options = {"confirm_publish": True} + + task_routes = { + "*": { + "routing_key": "my-queue", + }, + } + Celery automatically detects if quorum queues are used using the :setting:`worker_detect_quorum_queues` setting. We recommend to keep the default behavior turned on. From d165f955fc9a78ecd21e982e4554d3a84f90231d Mon Sep 17 00:00:00 2001 From: Kyle Chang Date: Fri, 16 May 2025 12:52:20 -0700 Subject: [PATCH 1006/1051] Add: Dumper Unit Test (#9711) * add: dumper unit test * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * style: fix flake8 warnings in test_dumper.py * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --------- Co-authored-by: Kyle Chang Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- t/unit/events/test_dumper.py | 70 ++++++++++++++++++++++++++++++++++++ 1 file changed, 70 insertions(+) create mode 100644 t/unit/events/test_dumper.py diff --git a/t/unit/events/test_dumper.py b/t/unit/events/test_dumper.py new file mode 100644 index 00000000000..e6f8a577e99 --- /dev/null +++ b/t/unit/events/test_dumper.py @@ -0,0 +1,70 @@ +import io +from datetime import datetime + +from celery.events import dumper + + +def test_humanize_type(): + assert dumper.humanize_type('worker-online') == 'started' + assert dumper.humanize_type('worker-offline') == 'shutdown' + assert dumper.humanize_type('worker-heartbeat') == 'heartbeat' + + +def test_dumper_say(): + buf = io.StringIO() + d = dumper.Dumper(out=buf) + d.say('hello world') + assert 'hello world' in buf.getvalue() + + +def test_format_task_event_output(): + buf = io.StringIO() + d = dumper.Dumper(out=buf) + d.format_task_event( + hostname='worker1', + timestamp=datetime(2024, 1, 1, 12, 0, 0), + type='task-succeeded', + task='mytask(123) args=(1,) kwargs={}', + event={'result': 'ok', 'foo': 'bar'} + ) + output = buf.getvalue() + assert 'worker1 [2024-01-01 12:00:00]' in output + assert 'task succeeded' in output + assert 'mytask(123) args=(1,) kwargs={}' in output + assert 'result=ok' in output + assert 'foo=bar' in output + + +def test_on_event_task_received(): + buf = io.StringIO() + d = dumper.Dumper(out=buf) + event = { + 'timestamp': datetime(2024, 1, 1, 12, 0, 0).timestamp(), + 'type': 'task-received', + 'hostname': 'worker1', + 'uuid': 'abc', + 'name': 'mytask', + 'args': '(1,)', + 'kwargs': '{}', + } + d.on_event(event.copy()) + output = buf.getvalue() + assert 'worker1 [2024-01-01 12:00:00]' in output + assert 'task received' in output + assert 'mytask(abc) 
args=(1,) kwargs={}' in output + + +def test_on_event_non_task(): + buf = io.StringIO() + d = dumper.Dumper(out=buf) + event = { + 'timestamp': datetime(2024, 1, 1, 12, 0, 0).timestamp(), + 'type': 'worker-online', + 'hostname': 'worker1', + 'foo': 'bar', + } + d.on_event(event.copy()) + output = buf.getvalue() + assert 'worker1 [2024-01-01 12:00:00]' in output + assert 'started' in output + assert 'foo=bar' in output From c7bb67f7412c61477473299ae2f251d5731be3c1 Mon Sep 17 00:00:00 2001 From: rogerforlife <119479168+rogerforlife@users.noreply.github.com> Date: Fri, 16 May 2025 15:32:47 -0700 Subject: [PATCH 1007/1051] Add unit test for event.group_from (#9709) * Add unit test for event.group_from * fix: flake8 --- t/unit/events/test_events.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/t/unit/events/test_events.py b/t/unit/events/test_events.py index 116e932500d..958ed509f44 100644 --- a/t/unit/events/test_events.py +++ b/t/unit/events/test_events.py @@ -337,3 +337,11 @@ def test_default_dispatcher(app): with app.events.default_dispatcher() as d: assert d assert d.connection + + +def test_group_from(): + from celery.events import event + print("event.py loaded from:", event.__file__) + assert event.group_from('task-sent') == 'task' + assert event.group_from('custom-my-event') == 'custom' + assert event.group_from('foo') == 'foo' From 5fc068cd4561ddddebaa5abdde947a90105fa59e Mon Sep 17 00:00:00 2001 From: ali rafiei <103249712+alirafiei75@users.noreply.github.com> Date: Sun, 18 May 2025 06:56:53 +0330 Subject: [PATCH 1008/1051] refactor: add beat_cron_starting_deadline documentation warning (#9712) Co-authored-by: Asif Saif Uddin --- docs/userguide/configuration.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst index 58f7e7f19d5..a71b0245909 100644 --- a/docs/userguide/configuration.rst +++ b/docs/userguide/configuration.rst @@ -3934,6 +3934,10 @@ When using cron, the number of seconds :mod:`~celery.bin.beat` can look back when deciding whether a cron schedule is due. When set to `None`, cronjobs that are past due will always run immediately. +.. warning:: + + Setting this higher than 3600 (1 hour) is highly discouraged. + .. 
setting:: beat_logfile ``beat_logfile`` From b6cab29e6932f100c9f30706ce2600ccff5e14e3 Mon Sep 17 00:00:00 2001 From: Kyle Chang Date: Sat, 17 May 2025 21:20:44 -0700 Subject: [PATCH 1009/1051] fix: resolve issue #9569 by supporting distinct broker transport options for workers (#9695) Co-authored-by: Asif Saif Uddin --- celery/app/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/celery/app/utils.py b/celery/app/utils.py index 0dd3409d575..da2ee66a071 100644 --- a/celery/app/utils.py +++ b/celery/app/utils.py @@ -35,7 +35,7 @@ """ HIDDEN_SETTINGS = re.compile( - 'API|TOKEN|KEY|SECRET|PASS|PROFANITIES_LIST|SIGNATURE|DATABASE', + 'API|TOKEN|KEY|SECRET|PASS|PROFANITIES_LIST|SIGNATURE|DATABASE|BEAT_DBURI', re.IGNORECASE, ) From b06e53f324f0cfe2b1c98b99d7a8a158d7034dea Mon Sep 17 00:00:00 2001 From: Lucas Infante Date: Sun, 18 May 2025 18:18:22 -0300 Subject: [PATCH 1010/1051] Fixes issue with retry callback argument types in DelayedDelivery (#9708) --- celery/worker/consumer/delayed_delivery.py | 5 +- t/unit/worker/test_native_delayed_delivery.py | 51 ++++++++++++++++++- 2 files changed, 52 insertions(+), 4 deletions(-) diff --git a/celery/worker/consumer/delayed_delivery.py b/celery/worker/consumer/delayed_delivery.py index 7a39c60f090..66a55015618 100644 --- a/celery/worker/consumer/delayed_delivery.py +++ b/celery/worker/consumer/delayed_delivery.py @@ -3,7 +3,7 @@ This module provides the DelayedDelivery bootstep which handles setup and configuration of native delayed delivery functionality when using quorum queues. """ -from typing import List, Optional, Set, Union, ValuesView +from typing import Iterator, List, Optional, Set, Union, ValuesView from kombu import Connection, Queue from kombu.transport.native_delayed_delivery import (bind_queue_to_native_delayed_delivery_exchange, @@ -168,11 +168,12 @@ def _bind_queues(self, app: Celery, connection: Connection) -> None: ) raise - def _on_retry(self, exc: Exception, intervals_count: int) -> None: + def _on_retry(self, exc: Exception, interval_range: Iterator[float], intervals_count: int) -> None: """Callback for retry attempts. 
Args: exc: The exception that triggered the retry + interval_range: An iterator which returns the time in seconds to sleep next intervals_count: Number of retry attempts so far """ logger.warning( diff --git a/t/unit/worker/test_native_delayed_delivery.py b/t/unit/worker/test_native_delayed_delivery.py index bb1c98b3887..7323ead7867 100644 --- a/t/unit/worker/test_native_delayed_delivery.py +++ b/t/unit/worker/test_native_delayed_delivery.py @@ -1,10 +1,13 @@ +import itertools from logging import LogRecord +from typing import Iterator from unittest.mock import Mock, patch import pytest from kombu import Exchange, Queue +from kombu.utils.functional import retry_over_time -from celery.worker.consumer.delayed_delivery import DelayedDelivery +from celery.worker.consumer.delayed_delivery import MAX_RETRIES, RETRY_INTERVAL, DelayedDelivery class test_DelayedDelivery: @@ -151,7 +154,11 @@ def test_on_retry_logging(self, caplog): delayed_delivery = DelayedDelivery(Mock()) exc = ConnectionRefusedError("Connection refused") - delayed_delivery._on_retry(exc, 1) + # Create a dummy float iterator + interval_range = iter([1.0, 2.0, 3.0]) + intervals_count = 1 + + delayed_delivery._on_retry(exc, interval_range, intervals_count) assert len(caplog.records) == 1 record = caplog.records[0] @@ -159,6 +166,46 @@ def test_on_retry_logging(self, caplog): assert "attempt 2/3" in record.message assert "Connection refused" in record.message + def test_on_retry_argument_types(self): + delayed_delivery_instance = DelayedDelivery(parent=Mock()) + fake_exception = ConnectionRefusedError("Simulated failure") + + # Define a custom errback to check types + def type_checking_errback(self, exc, interval_range, intervals_count): + assert isinstance(exc, Exception), f"Expected Exception, got {type(exc)}" + assert isinstance(interval_range, Iterator), f"Expected Iterator, got {type(interval_range)}" + assert isinstance(intervals_count, int), f"Expected int, got {type(intervals_count)}" + + peek_iter, interval_range = itertools.tee(interval_range) + try: + first = next(peek_iter) + assert isinstance(first, float) + except StopIteration: + pass + + return 0.1 + + # Patch _setup_delayed_delivery to raise the exception immediately + with patch.object(delayed_delivery_instance, '_setup_delayed_delivery', side_effect=fake_exception): + # Patch _on_retry properly as a bound method to avoid 'missing self' + with patch.object( + delayed_delivery_instance, + '_on_retry', + new=type_checking_errback.__get__(delayed_delivery_instance) + ): + try: + with pytest.raises(ConnectionRefusedError): + retry_over_time( + delayed_delivery_instance._setup_delayed_delivery, + args=(Mock(), "amqp://localhost"), + catch=(ConnectionRefusedError,), + errback=delayed_delivery_instance._on_retry, + interval_start=RETRY_INTERVAL, + max_retries=MAX_RETRIES, + ) + except ConnectionRefusedError: + pass # expected + def test_start_with_no_queues(self, caplog): consumer_mock = Mock() consumer_mock.app.conf.broker_native_delayed_delivery_queue_type = 'classic' From 7cf9d8987017ee6414644ff7bf32756860255d2a Mon Sep 17 00:00:00 2001 From: rogerforlife <119479168+rogerforlife@users.noreply.github.com> Date: Sun, 18 May 2025 21:14:03 -0700 Subject: [PATCH 1011/1051] get_exchange-unit-test (#9710) * get_exchange-unit-test * fix: flake8 * Update t/unit/events/test_events.py * Update t/unit/events/test_events.py --------- Co-authored-by: Asif Saif Uddin --- t/unit/events/test_events.py | 33 +++++++++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git 
a/t/unit/events/test_events.py b/t/unit/events/test_events.py
index 958ed509f44..21fcc5003f1 100644
--- a/t/unit/events/test_events.py
+++ b/t/unit/events/test_events.py
@@ -339,6 +339,39 @@ def test_default_dispatcher(app):
         assert d.connection


+class DummyConn:
+    class transport:
+        driver_type = 'amqp'
+
+
+def test_get_exchange_default_type():
+    from celery.events import event
+    conn = DummyConn()
+    ex = event.get_exchange(conn)
+    assert ex.type == 'topic'
+    assert ex.name == event.EVENT_EXCHANGE_NAME
+
+
+def test_get_exchange_redis_type():
+    from celery.events import event
+
+    class RedisConn:
+        class transport:
+            driver_type = 'redis'
+
+    conn = RedisConn()
+    ex = event.get_exchange(conn)
+    assert ex.type == 'fanout'
+    assert ex.name == event.EVENT_EXCHANGE_NAME
+
+
+def test_get_exchange_custom_name():
+    from celery.events import event
+    conn = DummyConn()
+    ex = event.get_exchange(conn, name='custom')
+    assert ex.name == 'custom'
+
+
 def test_group_from():
     from celery.events import event
     print("event.py loaded from:", event.__file__)

From 5aae2ca5f455e148cd59de89907459265c5b81fc Mon Sep 17 00:00:00 2001
From: Mattias De Charleroy
Date: Tue, 20 May 2025 23:13:09 +0200
Subject: [PATCH 1012/1051] ISSUE-9704: Update documentation of
 result_expires, filesystem backend is supported if celery beat is active

---
 docs/userguide/configuration.rst | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst
index a71b0245909..26b4d64db71 100644
--- a/docs/userguide/configuration.rst
+++ b/docs/userguide/configuration.rst
@@ -885,9 +885,9 @@ on backend specifications).
 .. note::

     For the moment this only works with the AMQP, database, cache, Couchbase,
-    and Redis backends.
+    filesystem and Redis backends.

-    When using the database backend, ``celery beat`` must be
+    When using the database or filesystem backend, ``celery beat`` must be
     running for the results to be expired.

 ..
setting:: result_cache_max From b00c1446d809b8909c4932674c49a7659838aaa6 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Wed, 21 May 2025 06:09:09 +0000 Subject: [PATCH 1013/1051] update to blacksmith ubuntu 24.04 --- .github/workflows/python-package.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index f503f78bb33..fa2532cdb04 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -34,7 +34,7 @@ jobs: fail-fast: false matrix: python-version: ['3.8', '3.9', '3.10', '3.11', '3.12', '3.13', 'pypy-3.10'] - os: ["blacksmith-4vcpu-ubuntu-2204", "windows-latest"] + os: ["blacksmith-4vcpu-ubuntu-2404", "windows-latest"] exclude: - python-version: '3.9' os: "windows-latest" @@ -89,7 +89,7 @@ jobs: if: needs.Unit.result == 'success' timeout-minutes: 240 - runs-on: blacksmith-4vcpu-ubuntu-2204 + runs-on: blacksmith-4vcpu-ubuntu-2404 strategy: fail-fast: false matrix: @@ -142,7 +142,7 @@ jobs: needs: - Unit if: needs.Unit.result == 'success' - runs-on: blacksmith-4vcpu-ubuntu-2204 + runs-on: blacksmith-4vcpu-ubuntu-2404 strategy: fail-fast: false matrix: From 778b009ffbad7c7d8061a4e7cc4c4445ec541da3 Mon Sep 17 00:00:00 2001 From: Jai <89634744+jaiganeshs21@users.noreply.github.com> Date: Sat, 24 May 2025 01:41:50 +0530 Subject: [PATCH 1014/1051] Added unit tests for celery.utils.iso8601 (#9725) * Added unit tests for celery.utils.iso8601 * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * pre-commit fix --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- t/unit/utils/test_iso8601.py | 76 ++++++++++++++++++++++++++++++++++++ 1 file changed, 76 insertions(+) create mode 100644 t/unit/utils/test_iso8601.py diff --git a/t/unit/utils/test_iso8601.py b/t/unit/utils/test_iso8601.py new file mode 100644 index 00000000000..77b695e19d4 --- /dev/null +++ b/t/unit/utils/test_iso8601.py @@ -0,0 +1,76 @@ +from datetime import datetime, timedelta, timezone + +import pytest + +from celery.exceptions import CPendingDeprecationWarning +from celery.utils.iso8601 import parse_iso8601 + + +def test_parse_iso8601_utc(): + dt = parse_iso8601("2023-10-26T10:30:00Z") + assert dt == datetime(2023, 10, 26, 10, 30, 0, tzinfo=timezone.utc) + + +def test_parse_iso8601_positive_offset(): + dt = parse_iso8601("2023-10-26T10:30:00+05:30") + expected_tz = timezone(timedelta(hours=5, minutes=30)) + assert dt == datetime(2023, 10, 26, 10, 30, 0, tzinfo=expected_tz) + + +def test_parse_iso8601_negative_offset(): + dt = parse_iso8601("2023-10-26T10:30:00-08:00") + expected_tz = timezone(timedelta(hours=-8)) + assert dt == datetime(2023, 10, 26, 10, 30, 0, tzinfo=expected_tz) + + +def test_parse_iso8601_with_microseconds(): + dt = parse_iso8601("2023-10-26T10:30:00.123456Z") + assert dt == datetime(2023, 10, 26, 10, 30, 0, 123456, tzinfo=timezone.utc) + + +def test_parse_iso8601_date_only(): + dt = parse_iso8601("2023-10-26") + assert dt == datetime(2023, 10, 26, 0, 0, 0) # Expects naive datetime + + +def test_parse_iso8601_date_hour_minute_only(): + # The regex uses '.' as a separator, often 'T' is used. + # Let's test with 'T' as it's common in ISO8601. 
+    dt = parse_iso8601("2023-10-26T10:30")
+    assert dt == datetime(2023, 10, 26, 10, 30, 0)  # Expects naive datetime
+
+
+def test_parse_iso8601_invalid_string():
+    with pytest.raises(ValueError, match="unable to parse date string"):
+        parse_iso8601("invalid-date-string")
+
+
+def test_parse_iso8601_malformed_strings():
+    # These strings match the regex but have invalid date/time component values
+    invalid_component_strings = [
+        "2023-13-01T00:00:00Z",  # Invalid month
+        "2023-12-32T00:00:00Z",  # Invalid day
+        "2023-12-01T25:00:00Z",  # Invalid hour
+        "2023-12-01T00:60:00Z",  # Invalid minute
+        "2023-12-01T00:00:60Z",  # Invalid second
+    ]
+    for s in invalid_component_strings:
+        # For these, the error comes from the datetime constructor
+        with pytest.raises(ValueError):
+            parse_iso8601(s)
+
+    # This string has a timezone format that is ignored by the parser, resulting in a naive datetime
+    ignored_tz_string = "2023-10-26T10:30:00+05:AA"
+    dt_ignored_tz = parse_iso8601(ignored_tz_string)
+    assert dt_ignored_tz == datetime(2023, 10, 26, 10, 30, 0)
+    assert dt_ignored_tz.tzinfo is None
+
+    # This string does not match the main ISO8601_REGEX pattern correctly, leading to None groups
+    unparseable_string = "20231026T103000Z"
+    with pytest.raises(TypeError):  # Expects TypeError due to int(None)
+        parse_iso8601(unparseable_string)
+
+
+def test_parse_iso8601_deprecation_warning():
+    with pytest.warns(CPendingDeprecationWarning, match="parse_iso8601 is scheduled for deprecation"):
+        parse_iso8601("2023-10-26T10:30:00Z")

From 0c1c52b7c361b65c286b30b13973df741799f4ea Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Thu, 29 May 2025 13:12:24 +0600
Subject: [PATCH 1015/1051] Update introduction.rst docs (#9728)

* Update introduction.rst docs

* Update docs/getting-started/introduction.rst

---
 docs/getting-started/introduction.rst | 18 +++++++-----------
 1 file changed, 7 insertions(+), 11 deletions(-)

diff --git a/docs/getting-started/introduction.rst b/docs/getting-started/introduction.rst
index 3db4f3aebce..b3d47f3a2b0 100644
--- a/docs/getting-started/introduction.rst
+++ b/docs/getting-started/introduction.rst
@@ -39,24 +39,20 @@ What do I need?
 ===============

 .. sidebar:: Version Requirements
-    :subtitle: Celery version 5.3 runs on
-
-    - Python ❨3.8, 3.9, 3.10, 3.11❩
-    - PyPy3.8+ ❨v7.3.11+❩
-
-    Celery 4.x was the last version to support Python 2.7,
-    Celery 5.x requires Python 3.6 or newer.
-    Celery 5.1.x also requires Python 3.6 or newer.
-    Celery 5.2.x requires Python 3.7 or newer.
+    :subtitle: Celery version 5.5.x runs on:

+    - Python ❨3.8, 3.9, 3.10, 3.11, 3.12, 3.13❩
+    - PyPy3.9+ ❨v7.3.12+❩

     If you're running an older version of Python, you need to be running
     an older version of Celery:

-    - Python 2.7 or Python 3.5: Celery series 4.4 or earlier.
+    - Python 3.7: Celery 5.2 or earlier.
+    - Python 3.6: Celery 5.1 or earlier.
+    - Python 2.7: Celery 4.x series.
     - Python 2.6: Celery series 3.1 or earlier.
     - Python 2.5: Celery series 3.0 or earlier.
-    - Python 2.4 was Celery series 2.2 or earlier.
+    - Python 2.4: Celery series 2.2 or earlier.

 Celery is a project with minimal funding,
 so we don't support Microsoft Windows.
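
An aside on the ``parse_iso8601`` deprecation exercised a couple of patches
above: the usual stdlib replacement is ``datetime.fromisoformat`` (a hedged
note, not part of either patch):

    # datetime.fromisoformat handles explicit UTC offsets directly;
    # on Python 3.11+ it also accepts the trailing 'Z' suffix.
    from datetime import datetime, timezone

    dt = datetime.fromisoformat('2023-10-26T10:30:00+00:00')
    assert dt == datetime(2023, 10, 26, 10, 30, tzinfo=timezone.utc)
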
From 088c39c0f78b23a9cdf8d1c9e265ea64d02cfd86 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sun, 1 Jun 2025 14:06:13 +0300 Subject: [PATCH 1016/1051] Prepare for release: v5.5.3 (#9732) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Bump version: 5.5.2 → 5.5.3 * Added Changelog for v5.5.3 --- .bumpversion.cfg | 2 +- Changelog.rst | 32 ++++++++++++++++++++++++++++++++ README.rst | 2 +- celery/__init__.py | 2 +- docs/history/changelog-5.5.rst | 32 ++++++++++++++++++++++++++++++++ docs/includes/introduction.txt | 2 +- 6 files changed, 68 insertions(+), 4 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 0274e41ebea..041bac81d1e 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 5.5.2 +current_version = 5.5.3 commit = True tag = True parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P[a-z\d]+)? diff --git a/Changelog.rst b/Changelog.rst index 25847891cee..1eba0c056b2 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -8,6 +8,38 @@ This document contains change notes for bugfix & new features in the main branch & 5.5.x series, please see :ref:`whatsnew-5.5` for an overview of what's new in Celery 5.5. +.. _version-5.5.3: + +5.5.3 +===== + +:release-date: 2025-06-01 +:release-by: Tomer Nosrati + +What's Changed +~~~~~~~~~~~~~~ + +- make the tests run on python 3.13 for gcs backend (#9677) +- Added DeepWiki to README (#9683) +- Limit redis to <=v5.2.1 to match Kombu (#9693) +- Use EX_OK instead of literal zero (#9684) +- Make wheel metadata reproducible (#9687) +- let celery install from kombu dependencies for better align (#9696) +- Fix stamping documentation to clarify stamped_headers key is optional in visitor methods (#9697) +- Support apply_async without queue argument on quorum queues (#9686) +- Updated rabbitmq doc about using quorum queues with task routes (#9707) +- Add: Dumper Unit Test (#9711) +- Add unit test for event.group_from (#9709) +- refactor: add beat_cron_starting_deadline documentation warning (#9712) +- fix: resolve issue #9569 by supporting distinct broker transport options for workers (#9695) +- Fixes issue with retry callback arguments in DelayedDelivery (#9708) +- get_exchange-unit-test (#9710) +- ISSUE-9704: Update documentation of result_expires, filesystem backend is supported (#9716) +- update to blacksmith ubuntu 24.04 (#9717) +- Added unit tests for celery.utils.iso8601 (#9725) +- Update introduction.rst docs (#9728) +- Prepare for release: v5.5.3 (#9732) + .. 
_version-5.5.2: 5.5.2 diff --git a/README.rst b/README.rst index 65dca86b8a6..a64c6bc0d9b 100644 --- a/README.rst +++ b/README.rst @@ -2,7 +2,7 @@ |build-status| |coverage| |license| |wheel| |semgrep| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge| -:Version: 5.5.2 (immunity) +:Version: 5.5.3 (immunity) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ diff --git a/celery/__init__.py b/celery/__init__.py index 6e8e714eede..d291dec8c80 100644 --- a/celery/__init__.py +++ b/celery/__init__.py @@ -17,7 +17,7 @@ SERIES = 'immunity' -__version__ = '5.5.2' +__version__ = '5.5.3' __author__ = 'Ask Solem' __contact__ = 'auvipy@gmail.com' __homepage__ = 'https://docs.celeryq.dev/' diff --git a/docs/history/changelog-5.5.rst b/docs/history/changelog-5.5.rst index 665e0e4238c..3a24cdef2e6 100644 --- a/docs/history/changelog-5.5.rst +++ b/docs/history/changelog-5.5.rst @@ -8,6 +8,38 @@ This document contains change notes for bugfix & new features in the main branch & 5.5.x series, please see :ref:`whatsnew-5.5` for an overview of what's new in Celery 5.5. +.. _version-5.5.3: + +5.5.3 +===== + +:release-date: 2025-06-01 +:release-by: Tomer Nosrati + +What's Changed +~~~~~~~~~~~~~~ + +- make the tests run on python 3.13 for gcs backend (#9677) +- Added DeepWiki to README (#9683) +- Limit redis to <=v5.2.1 to match Kombu (#9693) +- Use EX_OK instead of literal zero (#9684) +- Make wheel metadata reproducible (#9687) +- let celery install from kombu dependencies for better align (#9696) +- Fix stamping documentation to clarify stamped_headers key is optional in visitor methods (#9697) +- Support apply_async without queue argument on quorum queues (#9686) +- Updated rabbitmq doc about using quorum queues with task routes (#9707) +- Add: Dumper Unit Test (#9711) +- Add unit test for event.group_from (#9709) +- refactor: add beat_cron_starting_deadline documentation warning (#9712) +- fix: resolve issue #9569 by supporting distinct broker transport options for workers (#9695) +- Fixes issue with retry callback arguments in DelayedDelivery (#9708) +- get_exchange-unit-test (#9710) +- ISSUE-9704: Update documentation of result_expires, filesystem backend is supported (#9716) +- update to blacksmith ubuntu 24.04 (#9717) +- Added unit tests for celery.utils.iso8601 (#9725) +- Update introduction.rst docs (#9728) +- Prepare for release: v5.5.3 (#9732) + .. _version-5.5.2: 5.5.2 diff --git a/docs/includes/introduction.txt b/docs/includes/introduction.txt index 94539b5f2cd..4184b38313a 100644 --- a/docs/includes/introduction.txt +++ b/docs/includes/introduction.txt @@ -1,4 +1,4 @@ -:Version: 5.5.2 (immunity) +:Version: 5.5.3 (immunity) :Web: https://docs.celeryq.dev/en/stable/index.html :Download: https://pypi.org/project/celery/ :Source: https://github.com/celery/celery/ From dfff96cf6e192e83ccf8fea2790c897b0f4456bc Mon Sep 17 00:00:00 2001 From: Christopher Barber Date: Fri, 6 Jun 2025 10:04:17 +0200 Subject: [PATCH 1017/1051] docs mention of json serializer recursive reference message size blowup --- docs/userguide/calling.rst | 7 +++++++ docs/userguide/canvas.rst | 8 ++++++++ 2 files changed, 15 insertions(+) diff --git a/docs/userguide/calling.rst b/docs/userguide/calling.rst index b41db9e0d10..63b8998f77f 100644 --- a/docs/userguide/calling.rst +++ b/docs/userguide/calling.rst @@ -517,6 +517,13 @@ json -- JSON is supported in many programming languages, is now the original one. 
That is, ``loads(dumps(x)) != x`` if x has non-string keys. + .. warning:: + + With more complex workflows created using :ref:`guide-canvas`, the JSON + serializer has been observed to drastically inflate message sizes due to + recursive references, leading to resource issues. The *pickle* serializer + is not vulnerable to this and may therefore be preferable in such cases. + pickle -- If you have no desire to support any language other than Python, then using the pickle encoding will gain you the support of all built-in Python data types (except class instances), smaller diff --git a/docs/userguide/canvas.rst b/docs/userguide/canvas.rst index a39a2d65f0f..0d63f509992 100644 --- a/docs/userguide/canvas.rst +++ b/docs/userguide/canvas.rst @@ -467,6 +467,14 @@ Here're some examples: 8 +.. warning:: + + :ref:`guide-routing`. + With more complex workflows, the default JSON serializer has been observed to + drastically inflate message sizes due to recursive references, leading to + resource issues. The *pickle* serializer is not vulnerable to this and may + therefore be preferable in such cases. + .. _canvas-chain: Chains From 120f13a5ec7e6448fada9ddd06b42c8b2351e4b0 Mon Sep 17 00:00:00 2001 From: Chris Barber Date: Fri, 6 Jun 2025 13:02:02 +0200 Subject: [PATCH 1018/1051] fix typo canvas.rst --- docs/userguide/canvas.rst | 1 - 1 file changed, 1 deletion(-) diff --git a/docs/userguide/canvas.rst b/docs/userguide/canvas.rst index 0d63f509992..cf0e4644a5f 100644 --- a/docs/userguide/canvas.rst +++ b/docs/userguide/canvas.rst @@ -469,7 +469,6 @@ Here're some examples: .. warning:: - :ref:`guide-routing`. With more complex workflows, the default JSON serializer has been observed to drastically inflate message sizes due to recursive references, leading to resource issues. 
The *pickle* serializer is not vulnerable to this and may From 7054a0ee978af3672a8435f7a35d4494f04c02de Mon Sep 17 00:00:00 2001 From: Lucas Infante Date: Tue, 10 Jun 2025 02:55:16 -0300 Subject: [PATCH 1019/1051] Makes _on_retry return a float as required to be used as errback on retry_over_time (#9741) * Makes _on_retry return a float as required to be used as errback on retry_over_time * Removes witespace from empty line * Adding myself to the list of contribuitors due to previously merged PR * Trying to reduce possible sleep time during smoke tests * Adds missing import and default value * Rolls back previous changes and tries to increase timeout for failing test * Increases timeout for failing test * Tries to use a fixture to reduce possible waiting and avoid timeouts on affected tests * Resorting to increasing timeout since patching/mocking did not work * Removes change to check whether test timeouts are related or not * Adds back required changes to sort the bug and test the changes --------- Co-authored-by: Asif Saif Uddin --- CONTRIBUTORS.txt | 1 + celery/worker/consumer/delayed_delivery.py | 8 +++-- t/unit/worker/test_native_delayed_delivery.py | 30 +++++++++++++++++++ 3 files changed, 36 insertions(+), 3 deletions(-) diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 45f961d8a07..737abbbcda8 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -304,3 +304,4 @@ Nikos Atlas, 2024/08/26 Marc Bresson, 2024/09/02 Narasux, 2024/09/09 Colin Watson, 2025/03/01 +Lucas Infante, 2025/05/15 diff --git a/celery/worker/consumer/delayed_delivery.py b/celery/worker/consumer/delayed_delivery.py index 66a55015618..d71cd6b56fa 100644 --- a/celery/worker/consumer/delayed_delivery.py +++ b/celery/worker/consumer/delayed_delivery.py @@ -168,7 +168,7 @@ def _bind_queues(self, app: Celery, connection: Connection) -> None: ) raise - def _on_retry(self, exc: Exception, interval_range: Iterator[float], intervals_count: int) -> None: + def _on_retry(self, exc: Exception, interval_range: Iterator[float], intervals_count: int) -> float: """Callback for retry attempts. Args: @@ -176,10 +176,12 @@ def _on_retry(self, exc: Exception, interval_range: Iterator[float], intervals_c interval_range: An iterator which returns the time in seconds to sleep next intervals_count: Number of retry attempts so far """ + interval = next(interval_range) logger.warning( - "Retrying delayed delivery setup (attempt %d/%d) after error: %s", - intervals_count + 1, MAX_RETRIES, str(exc) + "Retrying delayed delivery setup (attempt %d/%d) after error: %s. Sleeping %.2f seconds.", + intervals_count + 1, MAX_RETRIES, str(exc), interval ) + return interval def _validate_configuration(self, app: Celery) -> None: """Validate all required configuration settings. 
diff --git a/t/unit/worker/test_native_delayed_delivery.py b/t/unit/worker/test_native_delayed_delivery.py index 7323ead7867..63d1950f17e 100644 --- a/t/unit/worker/test_native_delayed_delivery.py +++ b/t/unit/worker/test_native_delayed_delivery.py @@ -206,6 +206,36 @@ def type_checking_errback(self, exc, interval_range, intervals_count): except ConnectionRefusedError: pass # expected + def test_retry_over_time_with_float_return(self): + delayed_delivery = DelayedDelivery(parent=Mock()) + return_values = [] + + # Wrap the real _on_retry method to capture its return value + original_on_retry = delayed_delivery._on_retry + + def wrapped_on_retry(exc, interval_range, intervals_count): + result = original_on_retry(exc, interval_range, intervals_count) + return_values.append(result) + return result + + with patch.object( + delayed_delivery, '_setup_delayed_delivery', + side_effect=ConnectionRefusedError("Simulated failure") + ): + with pytest.raises(ConnectionRefusedError): + retry_over_time( + fun=delayed_delivery._setup_delayed_delivery, + args=(Mock(), "amqp://localhost"), + catch=(ConnectionRefusedError,), + errback=wrapped_on_retry, + interval_start=RETRY_INTERVAL, + max_retries=MAX_RETRIES + ) + + assert len(return_values) == MAX_RETRIES + for value in return_values: + assert isinstance(value, float), f"Expected float, got {type(value)}" + def test_start_with_no_queues(self, caplog): consumer_mock = Mock() consumer_mock.app.conf.broker_native_delayed_delivery_queue_type = 'classic' From d8a72b254c3b588152d417599112c12ae83b7953 Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Tue, 17 Jun 2025 22:53:55 +0600 Subject: [PATCH 1020/1051] Update canvas.rst doc calculation order for callback (#9758) --- docs/userguide/canvas.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/userguide/canvas.rst b/docs/userguide/canvas.rst index cf0e4644a5f..82b0e1521b6 100644 --- a/docs/userguide/canvas.rst +++ b/docs/userguide/canvas.rst @@ -244,7 +244,7 @@ arguments: >>> add.apply_async((2, 2), link=add.s(8)) As expected this will first launch one task calculating :math:`2 + 2`, then -another task calculating :math:`8 + 4`. +another task calculating :math:`4 + 8`. The Primitives ============== From 202e85992b6be29fd824ba82a67dfc7967b92b64 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 17 Jun 2025 22:32:33 +0300 Subject: [PATCH 1021/1051] Updated Blacksmith logo (#9763) --- .../images/blacksmith-logo-white-on-black.svg | 29 +++++++++---------- 1 file changed, 14 insertions(+), 15 deletions(-) diff --git a/docs/images/blacksmith-logo-white-on-black.svg b/docs/images/blacksmith-logo-white-on-black.svg index 3f8da98f3ae..3f6a87ab4e7 100644 --- a/docs/images/blacksmith-logo-white-on-black.svg +++ b/docs/images/blacksmith-logo-white-on-black.svg @@ -1,16 +1,15 @@ - - - - - - - - - - - - - - - + + + + + + + + + + + + + + From 533902142cb36eda40a8645421313e096bfe03be Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 17 Jun 2025 23:06:25 +0300 Subject: [PATCH 1022/1051] Made the Sponsors logos link to their website (#9764) --- README.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/README.rst b/README.rst index a64c6bc0d9b..1091a0b60d1 100644 --- a/README.rst +++ b/README.rst @@ -40,6 +40,7 @@ Blacksmith .. image:: ./docs/images/blacksmith-logo-white-on-black.svg :alt: Blacksmith logo :width: 240px + :target: https://blacksmith.sh/ `Official Announcement `_ @@ -49,6 +50,7 @@ Upstash .. 
 .. image:: https://upstash.com/logo/upstash-dark-bg.svg
   :alt: Upstash logo
   :width: 200px
+  :target: https://upstash.com/?code=celery

 `Upstash `_ offers a serverless Redis database service,
 providing a seamless solution for Celery users looking to leverage
@@ -62,6 +64,7 @@ Dragonfly
 .. image:: https://github.com/celery/celery/raw/main/docs/images/dragonfly.svg
   :alt: Dragonfly logo
   :width: 150px
+  :target: https://www.dragonflydb.io/

 `Dragonfly `_ is a drop-in Redis replacement that cuts costs and boosts performance.
 Designed to fully utilize the power of modern cloud hardware and deliver on the data
 demands of modern applications,

From 689581dfc15c1b164c30da256c19bf53858363ca Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Wed, 18 Jun 2025 19:07:56 +0600
Subject: [PATCH 1023/1051] add missing cloudamqp logo (#9767)

---
 docs/images/cloudamqp-logo-lightbg.svg | 12 ++++++++++++
 1 file changed, 12 insertions(+)
 create mode 100644 docs/images/cloudamqp-logo-lightbg.svg

diff --git a/docs/images/cloudamqp-logo-lightbg.svg b/docs/images/cloudamqp-logo-lightbg.svg
new file mode 100644
index 00000000000..5497fd29845
--- /dev/null
+++ b/docs/images/cloudamqp-logo-lightbg.svg
@@ -0,0 +1,12 @@
[SVG markup not preserved in this archive: 12 lines added.]

From ce7fe8a65f5edf2efca3476bdf94193d17f70192 Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Wed, 18 Jun 2025 19:27:04 +0600
Subject: [PATCH 1024/1051] Improve sponsor visibility (#9768)

---
 README.rst | 13 +++++++++++++
 1 file changed, 13 insertions(+)

diff --git a/README.rst b/README.rst
index 1091a0b60d1..8415508638d 100644
--- a/README.rst
+++ b/README.rst
@@ -44,6 +44,19 @@ Blacksmith

 `Official Announcement `_

+CloudAMQP
+---------
+
+.. image:: ./docs/images/cloudamqp-logo-lightbg.svg
+  :alt: CloudAMQP logo
+  :width: 240px
+  :target: https://www.cloudamqp.com/
+
+`CloudAMQP `_ is an industry-leading RabbitMQ-as-a-service provider.
+If you need highly available message queues, a perfect choice would be to use CloudAMQP.
+With 24,000+ running instances, CloudAMQP is the leading hosting provider of RabbitMQ,
+with customers all over the world.
+ Upstash ------- From 2a030962ed36b8433a877032e1baf867f6468c70 Mon Sep 17 00:00:00 2001 From: Diego Margoni Date: Tue, 24 Jun 2025 07:39:08 +0200 Subject: [PATCH 1025/1051] fix: (#9773) task_id must not be empty with chain as body of a chord (#9774) * fix: (#9773) task_id must not be empty with chain as body of a chord * fix:(#9773) pytest parameter name * Update t/unit/tasks/test_canvas.py Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> * fix: (#9773) pass new task_id (if none) and group_id in chord.run.freeze * feat: (#9773) add check_logs_for_error utility for log message verification --------- Co-authored-by: Asif Saif Uddin Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- celery/canvas.py | 3 +- t/integration/conftest.py | 28 +++++++++++ t/integration/test_canvas.py | 89 +++++++++++++++++++++++++++++++- t/smoke/tests/test_canvas.py | 79 +++++++++++++++++++++++++++++ t/unit/tasks/test_canvas.py | 98 +++++++++++++++++++++++++++++++++++- 5 files changed, 294 insertions(+), 3 deletions(-) diff --git a/celery/canvas.py b/celery/canvas.py index da395c1390e..1ceeacc166d 100644 --- a/celery/canvas.py +++ b/celery/canvas.py @@ -2234,7 +2234,8 @@ def run(self, header, body, partial_args, app=None, interval=None, options.pop('task_id', None) body.options.update(options) - bodyres = body.freeze(task_id, root_id=root_id) + body_task_id = task_id or uuid() + bodyres = body.freeze(body_task_id, group_id=group_id, root_id=root_id) # Chains should not be passed to the header tasks. See #3771 options.pop('chain', None) diff --git a/t/integration/conftest.py b/t/integration/conftest.py index 1707e3ca324..61b5ff85397 100644 --- a/t/integration/conftest.py +++ b/t/integration/conftest.py @@ -1,5 +1,7 @@ import json import os +import re +import time import pytest @@ -25,6 +27,32 @@ def get_active_redis_channels(): return get_redis_connection().execute_command('PUBSUB CHANNELS') +def check_for_logs( + caplog, + message: str, + max_wait: float = 1.0, + interval: float = 0.1 +) -> bool: + """Check if a specific message exists in the logs. + + Args: + caplog: The pytest caplog fixture + message: The message to look for, can be a regex pattern + max_wait: Maximum time to wait for the log message (in seconds) + interval: Time between log checks (in seconds) + + Returns: + bool: True if the message was found, False otherwise + """ + start_time = time.monotonic() + while time.monotonic() - start_time < max_wait: + # Check if the message is in the logs + if any(re.search(message, record.message) for record in caplog.records): + return True + time.sleep(interval) + return False + + @pytest.fixture(scope='session') def celery_config(request): config = { diff --git a/t/integration/test_canvas.py b/t/integration/test_canvas.py index ed838dc6730..fb544c05471 100644 --- a/t/integration/test_canvas.py +++ b/t/integration/test_canvas.py @@ -16,7 +16,7 @@ from celery.signals import before_task_publish, task_received from . 
import tasks -from .conftest import TEST_BACKEND, get_active_redis_channels, get_redis_connection +from .conftest import TEST_BACKEND, check_for_logs, get_active_redis_channels, get_redis_connection from .tasks import (ExpectedException, StampOnReplace, add, add_chord_to_chord, add_replaced, add_to_all, add_to_all_to_chord, build_chain_inside_task, collect_ids, delayed_sum, delayed_sum_with_soft_guard, errback_new_style, errback_old_style, fail, fail_replaced, identity, @@ -3151,6 +3151,93 @@ def test_upgraded_chord_link_error_with_header_errback_enabled(self, manager, su redis_connection.delete(errback_key, body_key) + @flaky + @pytest.mark.parametrize( + "input_body", + [ + (lambda: add.si(9, 7)), + ( + lambda: chain( + add.si(9, 7), + add.si(5, 7), + ) + ), + pytest.param( + ( + lambda: group( + [ + add.si(9, 7), + add.si(5, 7), + ] + ) + ), + marks=pytest.mark.skip(reason="Task times out"), + ), + ( + lambda: chord( + group( + [ + add.si(1, 1), + add.si(2, 2), + ] + ), + add.si(10, 10), + ) + ), + ], + ids=[ + "body is a single_task", + "body is a chain", + "body is a group", + "body is a chord", + ], + ) + def test_chord_error_propagation_with_different_body_types( + self, manager, caplog, input_body + ) -> None: + """Integration test for issue #9773: task_id must not be empty on chain of groups. + + This test reproduces the exact scenario from GitHub issue #9773 where a chord + with a failing group task and a chain body causes a ValueError during error handling. + + The test verifies that: + 1. The chord executes without the "task_id must not be empty" error + 2. The failure from the group properly propagates to the chain body + 3. Error handling works correctly with proper task IDs + + Args: + input_body (callable): A callable that returns a Celery signature for the body of the chord. 
+ """ + try: + manager.app.backend.ensure_chords_allowed() + except NotImplementedError as e: + raise pytest.skip(e.args[0]) + + # Create the failing group header (same for all tests) + failing_chord = chain( + group( + [ + add.si(15, 7), + # failing task + fail.si(), + ] + ), + # dynamic parametrized body + input_body(), + ) + + result = failing_chord.apply_async() + + # The chain should fail due to the failing task in the group + with pytest.raises(ExpectedException): + result.get(timeout=TIMEOUT) + + # Verify that error propagation worked correctly without the task_id error + # This test passes if no "task_id must not be empty" error was logged + # Check if the message appears in the logs (it shouldn't) + error_found = check_for_logs(caplog=caplog, message="ValueError: task_id must not be empty") + assert not error_found, "The 'task_id must not be empty' error was found in the logs" + class test_signature_serialization: """ diff --git a/t/smoke/tests/test_canvas.py b/t/smoke/tests/test_canvas.py index 02fbe9334f8..7750c365ba7 100644 --- a/t/smoke/tests/test_canvas.py +++ b/t/smoke/tests/test_canvas.py @@ -103,3 +103,82 @@ def test_sanity(self, celery_setup: CeleryTestSetup): ) res = sig.apply_async(queue=celery_setup.worker.worker_queue) assert res.get(timeout=RESULT_TIMEOUT) == ["body_task"] * 3 + + @pytest.mark.parametrize( + "input_body", + [ + (lambda queue: add.si(9, 7).set(queue=queue)), + ( + lambda queue: chain( + add.si(9, 7).set(queue=queue), + add.si(5, 7).set(queue=queue), + ) + ), + pytest.param( + ( + lambda queue: group( + [ + add.si(9, 7).set(queue=queue), + add.si(5, 7).set(queue=queue), + ] + ) + ), + marks=pytest.mark.skip(reason="Task times out"), + ), + ( + lambda queue: chord( + group( + [ + add.si(1, 1).set(queue=queue), + add.si(2, 2).set(queue=queue), + ] + ), + add.si(10, 10).set(queue=queue), + ) + ), + ], + ids=[ + "body is a single_task", + "body is a chain", + "body is a group", + "body is a chord", + ], + ) + def test_chord_error_propagation_with_different_body_types( + self, celery_setup: CeleryTestSetup, input_body + ) -> None: + """Reproduce issue #9773 with different chord body types. + + This test verifies that the "task_id must not be empty" error is fixed + regardless of the chord body type. The issue occurs when: + 1. A chord has a group with both succeeding and failing tasks + 2. The chord body can be any signature type (single task, chain, group, chord) + 3. When the group task fails, error propagation should work correctly + + Args: + input_body (callable): A callable that returns a Celery signature for the chord body. + """ + queue = celery_setup.worker.worker_queue + + # Create the failing group header (same for all tests) + failing_group = group( + [ + add.si(15, 7).set(queue=queue), + # failing task + fail.si().set(queue=queue), + ] + ) + + # Create the chord + test_chord = chord(failing_group, input_body(queue)) + + result = test_chord.apply_async() + + # The worker should not log the "task_id must not be empty" error + celery_setup.worker.assert_log_does_not_exist( + "ValueError: task_id must not be empty. Got None instead." 
+ ) + + # The chord should fail with the expected exception from the failing task + with pytest.raises(ExpectedException): + result.get(timeout=RESULT_TIMEOUT) diff --git a/t/unit/tasks/test_canvas.py b/t/unit/tasks/test_canvas.py index d4ed5e39afd..1eb088f0c51 100644 --- a/t/unit/tasks/test_canvas.py +++ b/t/unit/tasks/test_canvas.py @@ -1781,7 +1781,103 @@ def test_link_error_on_chord_header(self, header): assert errback == err for header_task in c.tasks: assert header_task.options['link_error'] == [err.clone(immutable=True)] - assert c.body.options['link_error'] == [err] + assert c.body.options["link_error"] == [err] + + def test_chord_run_ensures_body_has_valid_task_id(self): + """Test that chord.run() ensures body always gets a valid task ID. + + This is the unit test for the fix to issue #9773. The chord body should always + be frozen with a valid task ID to prevent "task_id must not be empty" errors. + """ + # Create a chord with header group and body chain + header = group([self.add.s(1, 1), self.add.s(2, 2)]) + body = chain(self.add.s(10, 10), self.add.s(20, 20)) + test_chord = chord(header, body) + + # Set up specific IDs for testing + chord_task_id = "test-chord-id" + group_task_id = "test-group-id" + header.options["task_id"] = group_task_id + + # Use patch to spy on body.freeze method + with patch.object(body, "freeze", wraps=body.freeze) as mock_freeze: + test_chord.run(header, body, (), task_id=chord_task_id) + + # Assert that body.freeze was called with the provided task_id and group_id + mock_freeze.assert_called_once_with( + chord_task_id, group_id=group_task_id, root_id=None + ) + + def test_chord_run_generates_task_id_when_none_provided(self): + """Test that chord.run() generates a task_id when none is provided.""" + # Create a chord with header group and body chain (no task_id set) + header = group([self.add.s(1, 1), self.add.s(2, 2)]) + body = chain(self.add.s(10, 10), self.add.s(20, 20)) + test_chord = chord(header, body) + + # Set group ID + group_id = "test-group-id" + header.options["task_id"] = group_id + + # Use patch to spy on body.freeze method + with patch.object(body, "freeze", wraps=body.freeze) as mock_freeze: + test_chord.run(header, body, (), task_id=None) + + # Assert that body.freeze was called with a generated UUID and group_id + mock_freeze.assert_called_once() + args, kwargs = mock_freeze.call_args + body_task_id = args[0] if args else kwargs.get("_id") + passed_group_id = kwargs.get("group_id") + + # Body should get a unique task ID (not None, not group_id) + assert body_task_id is not None + assert body_task_id != group_id # Should be different from group_id + assert passed_group_id == group_id # But should know its group + + def test_chord_run_body_freeze_prevents_task_id_empty_error(self): + """Test that proper body.freeze() call prevents 'task_id must not be empty' error. + + This test ensures that when chord body is frozen with a valid task ID, + subsequent error handling won't encounter the "task_id must not be empty" error. 
+ """ + # Create chord components + header = group([self.add.s(1, 1), self.add.s(2, 2)]) + body = chain(self.add.s(10, 10), self.add.s(20, 20)) + test_chord = chord(header, body) + + # Set a group task ID + group_id = "test-group-12345" + header.options["task_id"] = group_id + + # Run the chord with external task ID + external_task_id = "external-task-id" + result = test_chord.run(header, body, (), task_id=external_task_id) + + # Verify the frozen result has the external task ID, not group_id + assert result.id == external_task_id + assert body.id is not None + assert result.parent is not None + + # Body should know its group but have its own ID + assert body.options.get('group_id') == group_id or body.id != group_id + + def test_chord_run_body_freeze_with_no_external_task_id(self): + """Test chord body gets unique ID when no external task_id provided.""" + header = group([self.add.s(1, 1), self.add.s(2, 2)]) + body = chain(self.add.s(10, 10), self.add.s(20, 20)) + test_chord = chord(header, body) + + group_id = "test-group-12345" + header.options["task_id"] = group_id + + # Run chord without external task ID + result = test_chord.run(header, body, (), task_id=None) + + # Body should get unique ID, different from group_id + assert result.id is not None + assert result.id != group_id + assert body.id is not None + assert body.id != group_id class test_maybe_signature(CanvasCase): From be47ae7fe9498a2774315b9ac2f4afcebbc53bcc Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Tue, 24 Jun 2025 22:11:13 +0600 Subject: [PATCH 1026/1051] Update setup.py to fix deprecation warning (#9771) --- setup.py | 1 - 1 file changed, 1 deletion(-) diff --git a/setup.py b/setup.py index d5d68c2e772..8b86975dadd 100755 --- a/setup.py +++ b/setup.py @@ -165,7 +165,6 @@ def long_description(): }, classifiers=[ "Development Status :: 5 - Production/Stable", - "License :: OSI Approved :: BSD License", "Topic :: System :: Distributed Computing", "Topic :: Software Development :: Object Brokering", "Framework :: Celery", From dd4cf64a02d887e9425ec7d822bb623b8a88abf2 Mon Sep 17 00:00:00 2001 From: Lucas Infante Date: Wed, 25 Jun 2025 00:07:38 -0300 Subject: [PATCH 1027/1051] Adds integration test for chord_unlock bug when routed to quorum/topic queue (#9766) * Adds regression test for multiple chords using quorum queues and custom routing * Removes changes from Dockerfile * Fixes linting issues * Fixes long line linting * Removes fixed sleep * Update t/integration/test_rabbitmq_chord_unlock_routing.py * Update t/integration/test_rabbitmq_chord_unlock_routing.py Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> * Update t/integration/test_rabbitmq_chord_unlock_routing.py Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> * Uses logging instead of prints --------- Co-authored-by: Asif Saif Uddin Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- pyproject.toml | 2 +- .../test_rabbitmq_chord_unlock_routing.py | 155 ++++++++++++++++++ 2 files changed, 156 insertions(+), 1 deletion(-) create mode 100644 t/integration/test_rabbitmq_chord_unlock_routing.py diff --git a/pyproject.toml b/pyproject.toml index dae3f95465b..0c5c1450acf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,7 +3,7 @@ addopts = "--strict-markers" testpaths = "t/unit/" python_classes = "test_*" xfail_strict=true -markers = ["sleepdeprived_patched_module", "masked_modules", "patched_environ", "patched_module", "flaky", "timeout"] +markers = ["sleepdeprived_patched_module", 
"masked_modules", "patched_environ", "patched_module", "flaky", "timeout", "amqp"] [tool.mypy] warn_unused_configs = true diff --git a/t/integration/test_rabbitmq_chord_unlock_routing.py b/t/integration/test_rabbitmq_chord_unlock_routing.py new file mode 100644 index 00000000000..8743c922f9f --- /dev/null +++ b/t/integration/test_rabbitmq_chord_unlock_routing.py @@ -0,0 +1,155 @@ +import logging +import time +from concurrent.futures import ThreadPoolExecutor, as_completed + +import pytest +from kombu import Exchange, Queue + +from celery import Celery, chord +from celery.contrib.testing.worker import start_worker +from celery.result import allow_join_result + +logger = logging.getLogger(__name__) + + +@pytest.fixture(scope="function") +def app(): + """ + Celery app configured to: + - Use quorum queues with topic exchanges + - Route chord_unlock to a dedicated quorum queue + """ + app = Celery( + "test_app", + broker="pyamqp://guest:guest@rabbit:5672//", + backend="redis://redis/0", + ) + + app.conf.task_default_exchange_type = "topic" + app.conf.task_default_exchange = "default_exchange" + app.conf.task_default_queue = "default_queue" + app.conf.task_default_routing_key = "default" + + app.conf.task_queues = [ + Queue( + "header_queue", + Exchange("header_exchange", type="topic"), + routing_key="header_rk", + queue_arguments={"x-queue-type": "quorum"}, + ), + Queue( + "chord_callback_queue", + Exchange("chord_callback_exchange", type="topic"), + routing_key="chord_callback_queue", + queue_arguments={"x-queue-type": "quorum"}, + ), + ] + + app.conf.task_routes = { + "celery.chord_unlock": { + "queue": "chord_callback_queue", + "exchange": "chord_callback_exchange", + "routing_key": "chord_callback_queue", + "exchange_type": "topic", + }, + } + + return app + + +@pytest.fixture +def add(app): + @app.task(bind=True, max_retries=3, default_retry_delay=1) + def add(self, x, y): + time.sleep(0.05) + return x + y + return add + + +@pytest.fixture +def summarize(app): + @app.task(bind=True, max_retries=3, default_retry_delay=1) + def summarize(self, results): + return sum(results) + return summarize + + +def wait_for_chord_unlock(chord_result, timeout=10, interval=0.2): + """ + Waits for chord_unlock to be enqueued by polling the `parent` of the chord result. + This confirms that the header group finished and the callback is ready to run. + """ + start = time.monotonic() + while time.monotonic() - start < timeout: + if chord_result.parent and chord_result.parent.ready(): + return True + time.sleep(interval) + return False + + +@pytest.mark.amqp +@pytest.mark.timeout(90) +@pytest.mark.xfail(reason="chord_unlock routed to quorum/topic queue intermittently fails under load") +def test_chord_unlock_stress_routing_to_quorum_queue(app, add, summarize): + """ + Reproduces Celery Discussion #9742 (intermittently): + When chord_unlock is routed to a quorum queue via topic exchange, it may not be consumed + even if declared and bound, leading to stuck results. + + This stress test submits many chords rapidly, each routed explicitly via a topic exchange, + and waits to see how many complete. 
+ """ + chord_count = 50 + header_fanout = 3 + failures = [] + + pending_results = [] + + with allow_join_result(): + # Submit chords BEFORE worker is running + for i in range(chord_count): + header = [ + add.s(i, j).set( + queue="header_queue", + exchange="header_exchange", + routing_key="header_rk", + ) + for j in range(header_fanout) + ] + + callback = summarize.s().set( + queue="chord_callback_queue", + exchange="chord_callback_exchange", + routing_key="chord_callback_queue", + ) + + result = chord(header)(callback) + pending_results.append((i, result)) + + # Wait for chord_unlock tasks to be dispatched before starting the worker + for i, result in pending_results: + if not wait_for_chord_unlock(result): + logger.warning(f"[!] Chord {i}: unlock was not dispatched within timeout") + + # Start worker that consumes both header and callback queues + with start_worker( + app, queues=["header_queue", "chord_callback_queue"], loglevel="info", perform_ping_check=False + ): + # Poll all chord results + with ThreadPoolExecutor(max_workers=10) as executor: + futures = { + executor.submit(result.get, timeout=20): (i, result) + for i, result in pending_results + } + + for future in as_completed(futures): + i, result = futures[future] + try: + res = future.result() + logger.info(f"[✓] Chord {i} completed: {res}") + except Exception as exc: + logger.error(f"[✗] Chord {i} failed or stuck: {exc}") + failures.append((i, exc)) + + # Assertion: all chords should have completed + assert not failures, f"{len(failures)} of {chord_count} chords failed or got stuck" From de104a2fce76a8949012493b83da48daaa6d0247 Mon Sep 17 00:00:00 2001 From: Lucas Infante Date: Wed, 25 Jun 2025 13:20:36 -0300 Subject: [PATCH 1028/1051] Add xfail test for default queue/exchange fallback ignoring task_default_* settings (#9765) * Add test for fallback to 'direct' exchange and 'classic' queue when no routing is used This test verifies that Celery currently ignores task_default_exchange_type and task_default_queue_type for the default 'celery' queue/exchange, falling back to 'direct' and 'classic' respectively. Marked as xfail with strict=True to track future fix. 
* Fixes linting issues * Update t/integration/test_rabbitmq_default_queue_type_fallback.py Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> * Uses pyamqp instead of amqp on integration test * Tries resolving rabbitmq connection on CI * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * Update t/integration/test_rabbitmq_default_queue_type_fallback.py Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> * Update t/integration/test_rabbitmq_default_queue_type_fallback.py Waits for both rabbit and redis ports Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> * Fixes linting * Increases timeouts for rabbitmq connection and test execution --------- Co-authored-by: Asif Saif Uddin Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- ...st_rabbitmq_default_queue_type_fallback.py | 86 +++++++++++++++++++ 1 file changed, 86 insertions(+) create mode 100644 t/integration/test_rabbitmq_default_queue_type_fallback.py diff --git a/t/integration/test_rabbitmq_default_queue_type_fallback.py b/t/integration/test_rabbitmq_default_queue_type_fallback.py new file mode 100644 index 00000000000..7e1cc6c8c09 --- /dev/null +++ b/t/integration/test_rabbitmq_default_queue_type_fallback.py @@ -0,0 +1,86 @@ +import socket +import time + +import pytest +from kombu import Connection + +from celery import Celery + + +def wait_for_port(host, port, timeout=60.0): + """Wait for a port to become available.""" + start = time.time() + while time.time() - start < timeout: + try: + with socket.create_connection((host, port), timeout=2): + return + except OSError: + time.sleep(1) + raise TimeoutError(f"Timed out waiting for {host}:{port}") + + +@pytest.fixture() +def redis(): + """Fixture to provide Redis hostname and port.""" + return {"hostname": "redis", "port": 6379} + + +@pytest.fixture() +def app(rabbitmq, redis): + wait_for_port(rabbitmq.hostname, rabbitmq.ports[5672]) + wait_for_port(redis["hostname"], redis["port"]) + + return Celery( + "test_app", + broker=f"pyamqp://guest:guest@{rabbitmq.hostname}:{rabbitmq.ports[5672]}/", + backend=f"redis://{redis['hostname']}:{redis['port']}/0", + include=["t.integration.test_rabbitmq_default_queue_type_fallback"], + ) + + +@pytest.fixture() +def ping(app): + @app.task(name="ping") + def ping(): + return "pong" + return ping + + +@pytest.mark.amqp +@pytest.mark.timeout(60) +@pytest.mark.xfail( + reason=( + "Celery does not respect task_default_exchange_type/queue_type " + "when using implicit routing to the 'celery' queue. It creates " + "a classic queue and direct exchange instead." 
+ ), + strict=True, +) +def test_fallback_to_classic_queue_and_direct_exchange(app, ping): + from celery.contrib.testing.worker import start_worker + + # Start worker and submit task + with start_worker(app, queues=["celery"], loglevel="info", perform_ping_check=False): + result = ping.delay() + assert result.get(timeout=10) == "pong" + + exchange_type = None + start_time = time.time() + timeout = 10 # Maximum wait time in seconds + + while time.time() - start_time < timeout: + with Connection(app.conf.broker_url) as conn: + with conn.channel() as channel: + try: + response = channel.exchange_declare("celery", passive=True) + exchange_type = response['type'] + break + except Exception: + time.sleep(0.5) + + if exchange_type is None: + exchange_type = "error: Exchange declaration timed out" + assert exchange_type != "direct", ( + "Expected Celery to honor task_default_exchange_type, " + f"but got: {exchange_type}" + ) From 6d8bfd1d1d3031e8c198a834a3a7bcddb7266620 Mon Sep 17 00:00:00 2001 From: Lucas Infante Date: Mon, 30 Jun 2025 23:49:02 -0300 Subject: [PATCH 1029/1051] Add xfail test for RabbitMQ quorum queue global QoS race condition (#9770) * Add xfail test for RabbitMQ quorum queue QoS race condition This test simulates a quorum queue cluster propagation race where the first worker fails quorum detection and others succeed. It starts multiple workers concurrently and expects at least one AMQP error (e.g., 540 NOT_IMPLEMENTED) caused by applying global QoS to a quorum queue. The test is marked xfail since this behavior is a known RabbitMQ limitation. * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * Update t/integration/test_quorum_queue_qos_cluster_simulation.py * Update t/integration/test_quorum_queue_qos_cluster_simulation.py * Final version with comments * Uses processes instead of threads for better isolation * Fixes for CI * Tries adding extra safeguard to stop test * Tries to make connection parameters to rabbit and redis compatible with CI * Tries to fix test on CI * Fixes linting * Tries to force execution with no cache * Replaces cache sanitization steps. Tries fixing actions indentation. * Tries improving termination * Fixes pytest cache clearing. Tries to terminate process when catches error. * Fixes linting * Removes cache cleaning. 
Total refactor to try to avoid CI hanging

* Adds missing xfail

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* Fixes linting

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* Fixes linting

* Yet another try to avoid hanging on CI

* Improves comments (just to trigger tests again that seem to be hanging on test_canvas)

* Tries to improve termination to fix it on CI

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* Final tentative

* Adds missing xfail

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* Fixes linting

* Forces xfail

* Tries yet another time to fix it on CI

* Tries to add extra safeguard for CI, cleaning up processes that might be stranded

* Yet another try

* Tries using multiprocessing manager

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* Fixes linting

* Tries to force spawning and daemonizing

* Messy WORKING version (local)

* Cleans up WORKING local version

* Addresses PR suggestions

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* Removes debug step from integration tests flow

* Reverts changes on python-package.yml

* Reverts comments

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* Update t/integration/conftest.py

* Update t/integration/test_security.py

---------

Co-authored-by: Asif Saif Uddin
---
 t/integration/conftest.py                     |  43 ++---
 ...est_quorum_queue_qos_cluster_simulation.py | 151 ++++++++++++++++++
 t/integration/test_security.py                |  12 +-
 t/integration/test_tasks.py                   |  33 ++--
 4 files changed, 195 insertions(+), 44 deletions(-)
 create mode 100644 t/integration/test_quorum_queue_qos_cluster_simulation.py

diff --git a/t/integration/conftest.py b/t/integration/conftest.py
index 61b5ff85397..2383cb2d9b6 100644
--- a/t/integration/conftest.py
+++ b/t/integration/conftest.py
@@ -1,21 +1,25 @@
 import json
+import logging
 import os
 import re
 import time

 import pytest

-# we have to import the pytest plugin fixtures here,
-# in case user did not do the `python setup.py develop` yet,
-# that installs the pytest plugin into the setuptools registry.
 from celery.contrib.pytest import celery_app, celery_session_worker
 from celery.contrib.testing.manager import Manager
 from t.integration.tasks import get_redis_connection

+# we have to import the pytest plugin fixtures here,
+# in case user did not do the `python setup.py develop` yet,
+# that installs the pytest plugin into the setuptools registry.
+
+
+logger = logging.getLogger(__name__)
+
 TEST_BROKER = os.environ.get('TEST_BROKER', 'pyamqp://')
 TEST_BACKEND = os.environ.get('TEST_BACKEND', 'redis://')

-# Tricks flake8 into silencing redefining fixtures warnings.
 __all__ = (
     'celery_app',
     'celery_session_worker',
@@ -27,26 +31,9 @@ def get_active_redis_channels():
     return get_redis_connection().execute_command('PUBSUB CHANNELS')

-def check_for_logs(
-    caplog,
-    message: str,
-    max_wait: float = 1.0,
-    interval: float = 0.1
-) -> bool:
-    """Check if a specific message exists in the logs.
- - Args: - caplog: The pytest caplog fixture - message: The message to look for, can be a regex pattern - max_wait: Maximum time to wait for the log message (in seconds) - interval: Time between log checks (in seconds) - - Returns: - bool: True if the message was found, False otherwise - """ +def check_for_logs(caplog, message: str, max_wait: float = 1.0, interval: float = 0.1) -> bool: start_time = time.monotonic() while time.monotonic() - start_time < max_wait: - # Check if the message is in the logs if any(re.search(message, record.message) for record in caplog.records): return True time.sleep(interval) @@ -58,19 +45,20 @@ def celery_config(request): config = { 'broker_url': TEST_BROKER, 'result_backend': TEST_BACKEND, + 'result_extended': True, 'cassandra_servers': ['localhost'], 'cassandra_keyspace': 'tests', 'cassandra_table': 'tests', 'cassandra_read_consistency': 'ONE', 'cassandra_write_consistency': 'ONE', - 'result_extended': True } try: # To override the default configuration, create the integration-tests-config.json file # in Celery's root directory. # The file must contain a dictionary of valid configuration name/value pairs. - config_overrides = json.load(open(str(request.config.rootdir / "integration-tests-config.json"))) - config.update(config_overrides) + with open(str(request.config.rootdir / "integration-tests-config.json")) as file: + overrides = json.load(file) + config.update(overrides) except OSError: pass return config @@ -100,7 +88,10 @@ def app(celery_app): def manager(app, celery_session_worker): manager = Manager(app) yield manager - manager.wait_until_idle() + try: + manager.wait_until_idle() + except Exception as e: + logger.warning("Failed to stop Celery test manager cleanly: %s", e) @pytest.fixture(autouse=True) diff --git a/t/integration/test_quorum_queue_qos_cluster_simulation.py b/t/integration/test_quorum_queue_qos_cluster_simulation.py new file mode 100644 index 00000000000..fc75cb10691 --- /dev/null +++ b/t/integration/test_quorum_queue_qos_cluster_simulation.py @@ -0,0 +1,151 @@ +import gc +import logging +import os +import pprint +import uuid + +import billiard as multiprocessing +import pytest +from kombu import Queue +from kombu.pools import connections + +from celery import Celery, _state +from celery.contrib.testing.worker import start_worker + +QOS_GLOBAL_ERROR = "qos.global not allowed" + +logger = logging.getLogger(__name__) +logging.basicConfig(level=logging.INFO) + + +def create_app(queue_name: str) -> Celery: + rabbitmq_user = os.environ.get("RABBITMQ_DEFAULT_USER", "guest") + rabbitmq_pass = os.environ.get("RABBITMQ_DEFAULT_PASS", "guest") + redis_host = os.environ.get("REDIS_HOST", "localhost") + redis_port = os.environ.get("REDIS_PORT", "6379") + + broker_url = os.environ.get("TEST_BROKER", f"pyamqp://{rabbitmq_user}:{rabbitmq_pass}@localhost:5672//") + backend_url = os.environ.get("TEST_BACKEND", f"redis://{redis_host}:{redis_port}/0") + + app = Celery("quorum_qos_race", broker=broker_url, backend=backend_url) + + app.conf.task_queues = [ + Queue( + name=queue_name, + queue_arguments={"x-queue-type": "quorum"}, + ) + ] + app.conf.task_default_queue = queue_name + app.conf.worker_prefetch_multiplier = 1 + app.conf.task_acks_late = True + app.conf.task_reject_on_worker_lost = True + app.conf.broker_transport_options = {"confirm_publish": True} + + return app + + +def dummy_task_factory(app: Celery, simulate_qos_issue: bool): + @app.task(name="dummy_task") + def dummy_task(): + if simulate_qos_issue: + raise Exception("qos.global not 
allowed on quorum queues (simulated)") + return "ok" + return dummy_task + + +def run_worker(simulate_qos_issue: bool, result_queue: multiprocessing.Queue): + queue_name = f"race_quorum_queue_{uuid.uuid4().hex}" + app = create_app(queue_name) + logger.info("[Celery config snapshot]:\n%s", pprint.pformat(dict(app.conf))) + task = dummy_task_factory(app, simulate_qos_issue) + + try: + with start_worker( + app, + queues=[queue_name], + loglevel="INFO", + perform_ping_check=False, + shutdown_timeout=15, + ): + res = task.delay() + try: + result = res.get(timeout=10) + result_queue.put({"status": "ok", "result": result}) + except Exception as e: + result_queue.put({"status": "error", "reason": str(e)}) + except Exception as e: + logger.exception("[worker %s] external failure", simulate_qos_issue) + result_queue.put({"status": "external_failure", "reason": str(e)}) + finally: + if result_queue.empty(): + result_queue.put({"status": "crash", "reason": "Worker crashed without reporting"}) + + +@pytest.mark.amqp +@pytest.mark.timeout(90) +def test_rabbitmq_quorum_qos_visibility_race(): + try: + multiprocessing.set_start_method("spawn", force=True) + except RuntimeError: + pass + + results = [] + processes = [] + queues = [] + + for i in range(3): + simulate = (i == 0) + q = multiprocessing.Queue() + queues.append(q) + + p = multiprocessing.Process(target=run_worker, args=(simulate, q)) + p.daemon = True + processes.append(p) + p.start() + + try: + for i, (p, q) in enumerate(zip(processes, queues)): + try: + p.join(timeout=30) + if p.is_alive(): + p.terminate() + p.join(timeout=10) + results.append({"status": "timeout", "reason": f"[worker {i}] timeout"}) + else: + try: + results.append(q.get(timeout=5)) + except Exception as e: + results.append({"status": "error", "reason": f"Result error: {str(e)}"}) + except Exception: + try: + results.append(q.get(timeout=5)) + except Exception: + results.append({"status": "crash", "reason": f"Worker {i} crashed and gave no result"}) + + if any(QOS_GLOBAL_ERROR in r.get("reason", "").lower() for r in results): + pytest.xfail("Detected global QoS usage on quorum queue (simulated failure)") + finally: + for i, p in enumerate(processes): + if p.is_alive(): + p.terminate() + p.join(timeout=10) + + # Reset Kombu connection pools (safe public API) + try: + connections.clear() + except Exception: + pass + + # Reset Celery app/task global state + _state._set_current_app(None) + _state._task_stack.__init__() # reinitialize stack to avoid stale state + + # Force garbage collection + gc.collect() + + # Reset multiprocessing to default (may help restore test_multiprocess_producer expectations) + if multiprocessing.get_start_method(allow_none=True) == "spawn": + try: + multiprocessing.set_start_method("fork", force=True) + except RuntimeError: + pass diff --git a/t/integration/test_security.py b/t/integration/test_security.py index 36400940439..cdb6c3abd2c 100644 --- a/t/integration/test_security.py +++ b/t/integration/test_security.py @@ -1,5 +1,6 @@ import datetime import os +import socket import tempfile import pytest @@ -106,5 +107,12 @@ def gen_certificate(self, key, common_name, issuer=None, sign_key=None): @pytest.mark.xfail(reason="Issue #5269") def test_security_task_done(self): - t1 = add.delay(1, 1) - assert t1.get() == 2 + t1 = add.apply_async((1, 1)) + try: + result = t1.get(timeout=10) # redis backend will timeout + assert result == 2 + except (socket.timeout, TimeoutError) as e: + pytest.fail( + f"Timed out waiting for task result. 
Task was likely dropped by " + f"worker due to security misconfig. Exception details: {e}" + ) diff --git a/t/integration/test_tasks.py b/t/integration/test_tasks.py index 4b0839309a8..91bb7ccb4ea 100644 --- a/t/integration/test_tasks.py +++ b/t/integration/test_tasks.py @@ -2,10 +2,9 @@ import platform import time from datetime import datetime, timedelta, timezone -from multiprocessing import set_start_method -from time import perf_counter, sleep from uuid import uuid4 +import billiard as multiprocessing import pytest import celery @@ -23,7 +22,6 @@ TIMEOUT = 10 - _flaky = pytest.mark.flaky(reruns=5, reruns_delay=2) _timeout = pytest.mark.timeout(timeout=300) @@ -34,9 +32,9 @@ def flaky(fn): def set_multiprocessing_start_method(): """Set multiprocessing start method to 'fork' if not on Linux.""" - if platform.system() != 'Linux': + if platform.system() != "Linux": try: - set_start_method('fork') + multiprocessing.set_start_method("fork") except RuntimeError: # The method is already set pass @@ -126,7 +124,7 @@ def test_ignore_result(self, manager): assert result.get() is None # We wait since it takes a bit of time for the result to be # persisted in the result backend. - sleep(1) + time.sleep(1) assert result.result is None @flaky @@ -150,6 +148,7 @@ def test_timeout(self, manager): with pytest.raises(celery.exceptions.TimeoutError): result.get(timeout=5) + @pytest.mark.timeout(60) @flaky def test_expired(self, manager): """Testing expiration of task.""" @@ -180,27 +179,27 @@ def test_expired(self, manager): @flaky def test_eta(self, manager): """Tests tasks scheduled at some point in future.""" - start = perf_counter() + start = time.perf_counter() # Schedule task to be executed in 3 seconds result = add.apply_async((1, 1), countdown=3) - sleep(1) + time.sleep(1) assert result.status == 'PENDING' assert result.ready() is False assert result.get() == 2 - end = perf_counter() + end = time.perf_counter() assert result.status == 'SUCCESS' assert result.ready() is True # Difference between calling the task and result must be bigger than 3 secs assert (end - start) > 3 - start = perf_counter() + start = time.perf_counter() # Schedule task to be executed at time now + 3 seconds result = add.apply_async((2, 2), eta=datetime.now(timezone.utc) + timedelta(seconds=3)) - sleep(1) + time.sleep(1) assert result.status == 'PENDING' assert result.ready() is False assert result.get() == 4 - end = perf_counter() + end = time.perf_counter() assert result.status == 'SUCCESS' assert result.ready() is True # Difference between calling the task and result must be bigger than 3 secs @@ -268,6 +267,8 @@ def on_signature(self, sig, **headers) -> dict: # not match the task's stamps, allowing those tasks to proceed successfully. 
        worker_state.revoked_stamps.clear()

+    @pytest.mark.timeout(20)
+    @pytest.mark.flaky(reruns=2)
     def test_revoked_by_headers_complex_canvas(self, manager, subtests):
         """Testing revoking of task using a stamped header"""
         try:
@@ -370,7 +371,7 @@ def test_retry(self, manager):
             status = result.status
             if status != 'PENDING':
                 break
-            sleep(0.1)
+            time.sleep(0.1)
         else:
             raise AssertionError("Timeout while waiting for the task to be retried")
         assert status == 'RETRY'
@@ -386,7 +387,7 @@ def test_retry(self, manager):
             status = result.status
             if status != 'PENDING':
                 break
-            sleep(0.1)
+            time.sleep(0.1)
         else:
             raise AssertionError("Timeout while waiting for the task to be retried")
         assert status == 'RETRY'
@@ -411,7 +412,7 @@ def test_retry_with_unpickleable_exception(self, manager):
             status = job.status
             if status != 'PENDING':
                 break
-            sleep(0.1)
+            time.sleep(0.1)
         else:
             raise AssertionError("Timeout while waiting for the task to be retried")
@@ -501,7 +502,7 @@ class test_trace_log_arguments:
     def assert_trace_log(self, caplog, result, expected):
         # wait for logs from worker
-        sleep(.01)
+        time.sleep(.01)
         records = [(r.name, r.levelno, r.msg, r.data["args"], r.data["kwargs"])
                    for r in caplog.records

From 9cb389d31ad838a42a1786df8a605c8991547cc2 Mon Sep 17 00:00:00 2001
From: Diego Margoni
Date: Wed, 2 Jul 2025 10:21:48 +0200
Subject: [PATCH 1030/1051] fix: (#8786) time out when chord header fails with
 group body (#9788)

* fix: (#8786) time out when chord header fails with group body

* fix: (#8786) PyPy mock mapping compatibility

---
 CONTRIBUTORS.txt             |   1 +
 celery/app/builtins.py       |  11 +-
 celery/backends/base.py      | 119 +++++++++++-
 celery/backends/gcs.py       |   4 +-
 celery/backends/redis.py     |   6 +-
 t/integration/test_canvas.py |  17 +-
 t/smoke/tests/test_canvas.py |  17 +-
 t/unit/backends/test_base.py | 364 ++++++++++++++++++++++++++++++++++-
 t/unit/backends/test_gcs.py  | 116 +++++++++++
 9 files changed, 615 insertions(+), 40 deletions(-)

diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt
index 737abbbcda8..528d35736f5 100644
--- a/CONTRIBUTORS.txt
+++ b/CONTRIBUTORS.txt
@@ -305,3 +305,4 @@ Marc Bresson, 2024/09/02
 Narasux, 2024/09/09
 Colin Watson, 2025/03/01
 Lucas Infante, 2025/05/15
+Diego Margoni, 2025/07/01

diff --git a/celery/app/builtins.py b/celery/app/builtins.py
index 1a79c40932d..66fb94a29b2 100644
--- a/celery/app/builtins.py
+++ b/celery/app/builtins.py
@@ -40,8 +40,8 @@ def add_unlock_chord_task(app):

     Will join the chord by creating a task chain polling the header for completion.
""" + from celery.backends.base import _create_chord_error_with_cause from celery.canvas import maybe_signature - from celery.exceptions import ChordError from celery.result import allow_join_result, result_from_tuple @app.task(name='celery.chord_unlock', max_retries=None, shared=False, @@ -86,16 +86,15 @@ def unlock_chord(self, group_id, callback, interval=None, except StopIteration: reason = repr(exc) logger.exception('Chord %r raised: %r', group_id, exc) - app.backend.chord_error_from_stack(callback, ChordError(reason)) + chord_error = _create_chord_error_with_cause(message=reason, original_exc=exc) + app.backend.chord_error_from_stack(callback=callback, exc=chord_error) else: try: callback.delay(ret) except Exception as exc: # pylint: disable=broad-except logger.exception('Chord %r raised: %r', group_id, exc) - app.backend.chord_error_from_stack( - callback, - exc=ChordError(f'Callback error: {exc!r}'), - ) + chord_error = _create_chord_error_with_cause(message=f'Callback error: {exc!r}', original_exc=exc) + app.backend.chord_error_from_stack(callback=callback, exc=chord_error) return unlock_chord diff --git a/celery/backends/base.py b/celery/backends/base.py index dc79f4ebd73..c80591de19c 100644 --- a/celery/backends/base.py +++ b/celery/backends/base.py @@ -65,6 +65,33 @@ def unpickle_backend(cls, args, kwargs): return cls(*args, app=current_app._get_current_object(), **kwargs) +def _create_chord_error_with_cause(message, original_exc=None) -> ChordError: + """Create a ChordError preserving the original exception as __cause__. + + This helper reduces code duplication across the codebase when creating + ChordError instances that need to preserve the original exception. + """ + chord_error = ChordError(message) + if isinstance(original_exc, Exception): + chord_error.__cause__ = original_exc + return chord_error + + +def _create_fake_task_request(task_id, errbacks=None, task_name='unknown', **extra) -> Context: + """Create a fake task request context for error callbacks. + + This helper reduces code duplication when creating fake request contexts + for error callback handling. + """ + return Context({ + "id": task_id, + "errbacks": errbacks or [], + "delivery_info": dict(), + "task": task_name, + **extra + }) + + class _nulldict(dict): def ignore(self, *a, **kw): pass @@ -281,21 +308,25 @@ def mark_as_retry(self, task_id, exc, traceback=None, def chord_error_from_stack(self, callback, exc=None): app = self.app + try: backend = app._tasks[callback.task].backend except KeyError: backend = self + + # Handle group callbacks specially to prevent hanging body tasks + if isinstance(callback, group): + return self._handle_group_chord_error(group_callback=callback, backend=backend, exc=exc) # We have to make a fake request since either the callback failed or # we're pretending it did since we don't have information about the # chord part(s) which failed. This request is constructed as a best # effort for new style errbacks and may be slightly misleading about # what really went wrong, but at least we call them! 
- fake_request = Context({ - "id": callback.options.get("task_id"), - "errbacks": callback.options.get("link_error", []), - "delivery_info": dict(), + fake_request = _create_fake_task_request( + task_id=callback.options.get("task_id"), + errbacks=callback.options.get("link_error", []), **callback - }) + ) try: self._call_task_errbacks(fake_request, exc, None) except Exception as eb_exc: # pylint: disable=broad-except @@ -303,6 +334,74 @@ def chord_error_from_stack(self, callback, exc=None): else: return backend.fail_from_current_stack(callback.id, exc=exc) + def _handle_group_chord_error(self, group_callback, backend, exc=None): + """Handle chord errors when the callback is a group. + + When a chord header fails and the body is a group, we need to: + 1. Revoke all pending tasks in the group body + 2. Mark them as failed with the chord error + 3. Call error callbacks for each task + + This prevents the group body tasks from hanging indefinitely (#8786) + """ + + # Extract original exception from ChordError if available + if isinstance(exc, ChordError) and hasattr(exc, '__cause__') and exc.__cause__: + original_exc = exc.__cause__ + else: + original_exc = exc + + try: + # Freeze the group to get the actual GroupResult with task IDs + frozen_group = group_callback.freeze() + + if isinstance(frozen_group, GroupResult): + # revoke all tasks in the group to prevent execution + frozen_group.revoke() + + # Handle each task in the group individually + for result in frozen_group.results: + try: + # Create fake request for error callbacks + fake_request = _create_fake_task_request( + task_id=result.id, + errbacks=group_callback.options.get("link_error", []), + task_name=getattr(result, 'task', 'unknown') + ) + + # Call error callbacks for this task with original exception + try: + backend._call_task_errbacks(fake_request, original_exc, None) + except Exception: # pylint: disable=broad-except + # continue on exception to be sure to iter to all the group tasks + pass + + # Mark the individual task as failed with original exception + backend.fail_from_current_stack(result.id, exc=original_exc) + + except Exception as task_exc: # pylint: disable=broad-except + # Log error but continue with other tasks + logger.exception( + 'Failed to handle chord error for task %s: %r', + getattr(result, 'id', 'unknown'), task_exc + ) + + # Also mark the group itself as failed if it has an ID + frozen_group_id = getattr(frozen_group, 'id', None) + if frozen_group_id: + backend.mark_as_failure(frozen_group_id, original_exc) + + return None + + except Exception as cleanup_exc: # pylint: disable=broad-except + # Log the error and fall back to single task handling + logger.exception( + 'Failed to handle group chord error, falling back to single task handling: %r', + cleanup_exc + ) + # Fallback to original error handling + return backend.fail_from_current_stack(group_callback.id, exc=exc) + def fail_from_current_stack(self, task_id, exc=None): type_, real_exc, tb = sys.exc_info() try: @@ -1068,18 +1167,18 @@ def on_chord_part_return(self, request, state, result, **kwargs): ) except StopIteration: reason = repr(exc) - logger.exception('Chord %r raised: %r', gid, reason) - self.chord_error_from_stack(callback, ChordError(reason)) + chord_error = _create_chord_error_with_cause(message=reason, original_exc=exc) + self.chord_error_from_stack(callback=callback, exc=chord_error) else: try: callback.delay(ret) except Exception as exc: # pylint: disable=broad-except logger.exception('Chord %r raised: %r', gid, exc) - 
self.chord_error_from_stack( - callback, - ChordError(f'Callback error: {exc!r}'), + chord_error = _create_chord_error_with_cause( + message=f'Callback error: {exc!r}', original_exc=exc ) + self.chord_error_from_stack(callback=callback, exc=chord_error) finally: deps.delete() self.delete(key) diff --git a/celery/backends/gcs.py b/celery/backends/gcs.py index d667a9ccced..8a0c66bc6fb 100644 --- a/celery/backends/gcs.py +++ b/celery/backends/gcs.py @@ -8,6 +8,7 @@ from kombu.utils.functional import dictfilter from kombu.utils.url import url_to_parts +from celery.backends.base import _create_chord_error_with_cause from celery.canvas import maybe_signature from celery.exceptions import ChordError, ImproperlyConfigured from celery.result import GroupResult, allow_join_result @@ -293,7 +294,8 @@ def on_chord_part_return(self, request, state, result, **kwargs): reason = repr(exc) logger.exception('Chord %r raised: %r', gid, reason) - self.chord_error_from_stack(callback, ChordError(reason)) + chord_error = _create_chord_error_with_cause(message=reason, original_exc=exc) + self.chord_error_from_stack(callback, chord_error) else: try: callback.delay(ret) diff --git a/celery/backends/redis.py b/celery/backends/redis.py index 3e3ef737f95..e2597be88fd 100644 --- a/celery/backends/redis.py +++ b/celery/backends/redis.py @@ -11,6 +11,7 @@ from celery import states from celery._state import task_join_will_block +from celery.backends.base import _create_chord_error_with_cause from celery.canvas import maybe_signature from celery.exceptions import BackendStoreError, ChordError, ImproperlyConfigured from celery.result import GroupResult, allow_join_result @@ -436,7 +437,10 @@ def _unpack_chord_result(self, tup, decode, if state in EXCEPTION_STATES: retval = self.exception_to_python(retval) if state in PROPAGATE_STATES: - raise ChordError(f'Dependency {tid} raised {retval!r}') + chord_error = _create_chord_error_with_cause( + message=f'Dependency {tid} raised {retval!r}', original_exc=retval + ) + raise chord_error return retval def set_chord_size(self, group_id, chord_size): diff --git a/t/integration/test_canvas.py b/t/integration/test_canvas.py index fb544c05471..d7b47362440 100644 --- a/t/integration/test_canvas.py +++ b/t/integration/test_canvas.py @@ -3162,16 +3162,13 @@ def test_upgraded_chord_link_error_with_header_errback_enabled(self, manager, su add.si(5, 7), ) ), - pytest.param( - ( - lambda: group( - [ - add.si(9, 7), - add.si(5, 7), - ] - ) - ), - marks=pytest.mark.skip(reason="Task times out"), + ( + lambda: group( + [ + add.si(9, 7), + add.si(5, 7), + ] + ) ), ( lambda: chord( diff --git a/t/smoke/tests/test_canvas.py b/t/smoke/tests/test_canvas.py index 7750c365ba7..e0886d56e49 100644 --- a/t/smoke/tests/test_canvas.py +++ b/t/smoke/tests/test_canvas.py @@ -114,16 +114,13 @@ def test_sanity(self, celery_setup: CeleryTestSetup): add.si(5, 7).set(queue=queue), ) ), - pytest.param( - ( - lambda queue: group( - [ - add.si(9, 7).set(queue=queue), - add.si(5, 7).set(queue=queue), - ] - ) - ), - marks=pytest.mark.skip(reason="Task times out"), + ( + lambda queue: group( + [ + add.si(9, 7).set(queue=queue), + add.si(5, 7).set(queue=queue), + ] + ) ), ( lambda queue: chord( diff --git a/t/unit/backends/test_base.py b/t/unit/backends/test_base.py index 0d4550732bf..ce25ff72ad8 100644 --- a/t/unit/backends/test_base.py +++ b/t/unit/backends/test_base.py @@ -10,9 +10,10 @@ import celery from celery import chord, group, signature, states, uuid from celery.app.task import Context, Task -from 
celery.backends.base import BaseBackend, DisabledBackend, KeyValueStoreBackend, _nulldict +from celery.backends.base import (BaseBackend, DisabledBackend, KeyValueStoreBackend, _create_chord_error_with_cause, + _create_fake_task_request, _nulldict) from celery.exceptions import BackendGetMetaError, BackendStoreError, ChordError, SecurityError, TimeoutError -from celery.result import result_from_tuple +from celery.result import GroupResult, result_from_tuple from celery.utils import serialization from celery.utils.functional import pass1 from celery.utils.serialization import UnpickleableExceptionWrapper @@ -683,6 +684,365 @@ def test_get_children(self): b._get_task_meta_for.return_value = {'children': 3} assert b.get_children('id') == 3 + @pytest.mark.parametrize( + "message,original_exc,expected_cause_behavior", + [ + # With exception - should preserve original exception + ( + "Dependency failed", + ValueError("original error"), + "has_cause", + ), + # Without exception (None) - should not have __cause__ + ( + "Dependency failed", + None, + "no_cause", + ), + # With non-exception - should not have __cause__ + ( + "Dependency failed", + "not an exception", + "no_cause", + ), + ], + ids=( + "with_exception", + "without_exception", + "with_non_exception", + ) + ) + def test_create_chord_error_with_cause( + self, message, original_exc, expected_cause_behavior + ): + """Test _create_chord_error_with_cause with various parameter combinations.""" + chord_error = _create_chord_error_with_cause(message, original_exc) + + # Verify basic ChordError properties + assert isinstance(chord_error, ChordError) + assert str(chord_error) == message + + # Verify __cause__ behavior based on test case + if expected_cause_behavior == "has_cause": + assert chord_error.__cause__ is original_exc + elif expected_cause_behavior == "no_cause": + assert not hasattr(chord_error, '__cause__') or chord_error.__cause__ is None + + @pytest.mark.parametrize( + "task_id,errbacks,task_name,extra_kwargs,expected_attrs", + [ + # Basic parameters test + ( + "test-task-id", + ["errback1", "errback2"], + "test.task", + {}, + { + "id": "test-task-id", + "errbacks": ["errback1", "errback2"], + "task": "test.task", + "delivery_info": {}, + }, + ), + # Default parameters test + ( + "test-task-id", + None, + None, + {}, + { + "id": "test-task-id", + "errbacks": [], + "task": "unknown", + "delivery_info": {}, + }, + ), + # Extra parameters test + ( + "test-task-id", + None, + None, + {"extra_param": "extra_value"}, + { + "id": "test-task-id", + "errbacks": [], + "task": "unknown", + "delivery_info": {}, + "extra_param": "extra_value", + }, + ), + ], + ids=( + "basic_parameters", + "default_parameters", + "extra_parameters", + ) + ) + def test_create_fake_task_request( + self, task_id, errbacks, task_name, extra_kwargs, expected_attrs + ): + """Test _create_fake_task_request with various parameter combinations.""" + # Build call arguments + args = [task_id] + if errbacks is not None: + args.append(errbacks) + if task_name is not None: + args.append(task_name) + + fake_request = _create_fake_task_request(*args, **extra_kwargs) + + # Verify all expected attributes + for attr_name, expected_value in expected_attrs.items(): + assert getattr(fake_request, attr_name) == expected_value + + def _create_mock_callback(self, task_name="test.task", spec=None, **options): + """Helper to create mock callbacks with common setup.""" + from collections.abc import Mapping + + # Create a mock that properly implements the + # mapping protocol for PyPy env 
compatibility + class MockCallback(Mock, Mapping): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self._mapping_data = {} + + def __getitem__(self, key): + return self._mapping_data[key] + + def __iter__(self): + return iter(self._mapping_data) + + def __len__(self): + return len(self._mapping_data) + + def keys(self): + return self._mapping_data.keys() + + def items(self): + return self._mapping_data.items() + + callback = MockCallback(spec=spec) + callback.task = task_name + callback.options = {"link_error": [], **options} + + return callback + + def _setup_task_backend(self, task_name, backend=None): + """Helper to set up task with backend in app registry.""" + if backend is None: + backend = Mock() + backend.fail_from_current_stack = Mock(return_value="backend_result") + + self.app.tasks[task_name] = Mock() + self.app.tasks[task_name].backend = backend + return backend + + @pytest.mark.parametrize( + "callback_type,task_name,expected_group_handler_called", + [ + ("group", "test.group.task", True), + ("regular", "test.task", False), + ], + ids=["group_callback", "regular_callback"] + ) + def test_chord_error_from_stack_callback_dispatch(self, callback_type, task_name, expected_group_handler_called): + """Test chord_error_from_stack dispatches to correct handler based on callback type.""" + backend = self.b + + # Create callback based on type + spec = group if callback_type == "group" else None + callback = self._create_mock_callback(task_name, spec=spec) + + # Setup backend resolution + mock_backend = self._setup_task_backend(task_name) + + # Mock handlers + backend._handle_group_chord_error = Mock(return_value="group_result") + backend._call_task_errbacks = Mock() + + exc = ValueError("test exception") + result = backend.chord_error_from_stack(callback, exc) + + if expected_group_handler_called: + backend._handle_group_chord_error.assert_called_once_with( + group_callback=callback, backend=mock_backend, exc=exc + ) + assert result == "group_result" + else: + mock_backend.fail_from_current_stack.assert_called_once() + + def test_chord_error_from_stack_backend_fallback(self): + """Test chord_error_from_stack falls back to self when task not found.""" + backend = self.b + + callback = self._create_mock_callback("nonexistent.task") + + # Ensure task doesn't exist + if "nonexistent.task" in self.app.tasks: + del self.app.tasks["nonexistent.task"] + + backend._call_task_errbacks = Mock() + backend.fail_from_current_stack = Mock(return_value="self_result") + + _ = backend.chord_error_from_stack(callback, ValueError("test")) + + # Verify self was used as fallback backend + backend.fail_from_current_stack.assert_called_once() + + def _create_mock_frozen_group(self, group_id="group-id", task_ids=None, task_names=None): + """Helper to create mock frozen group with results.""" + if task_ids is None: + task_ids = ["task-id-1"] + if task_names is None: + task_names = ["test.task"] * len(task_ids) + + results = [] + for task_id, task_name in zip(task_ids, task_names): + result = Mock() + result.id = task_id + result.task = task_name + results.append(result) + + frozen_group = Mock(spec=GroupResult) + frozen_group.results = results + frozen_group.id = group_id + frozen_group.revoke = Mock() + return frozen_group + + def _setup_group_chord_error_test(self, exc=None, errbacks=None, task_ids=None): + """Common setup for group chord error tests.""" + if exc is None: + exc = ValueError("test error") + if errbacks is None: + errbacks = [] + if task_ids is None: + task_ids = 
["task-id-1"] + + backend = Mock() + backend._call_task_errbacks = Mock() + backend.fail_from_current_stack = Mock() + backend.mark_as_failure = Mock() + + group_callback = Mock(spec=group) + group_callback.options = {"link_error": errbacks} + + frozen_group = self._create_mock_frozen_group(task_ids=task_ids) + group_callback.freeze.return_value = frozen_group + + return self.b, backend, group_callback, frozen_group, exc + + @pytest.mark.parametrize( + "exception_setup,expected_exc_used", + [ + ("with_cause", "original"), + ("without_cause", "direct"), + ], + ids=["extracts_cause", "without_cause"] + ) + def test_handle_group_chord_error_exception_handling(self, exception_setup, expected_exc_used): + """Test _handle_group_chord_error handles exceptions with and without __cause__.""" + # Setup exceptions based on test case + if exception_setup == "with_cause": + original_exc = ValueError("original error") + exc = ChordError("wrapped error") + exc.__cause__ = original_exc + expected_exc = original_exc + else: + exc = ValueError("direct error") + expected_exc = exc + + b, backend, group_callback, frozen_group, _ = self._setup_group_chord_error_test(exc=exc) + + # Call the method + _ = b._handle_group_chord_error(group_callback, backend, exc) + + # Verify correct exception was used + backend.fail_from_current_stack.assert_called_with("task-id-1", exc=expected_exc) + backend.mark_as_failure.assert_called_with("group-id", expected_exc) + frozen_group.revoke.assert_called_once() + + def test_handle_group_chord_error_multiple_tasks(self): + """Test _handle_group_chord_error handles multiple tasks in group.""" + task_ids = ["task-id-1", "task-id-2"] + b, backend, group_callback, frozen_group, exc = self._setup_group_chord_error_test(task_ids=task_ids) + + # Call the method + b._handle_group_chord_error(group_callback, backend, exc) + + # Verify group revocation and all tasks handled + frozen_group.revoke.assert_called_once() + assert backend.fail_from_current_stack.call_count == 2 + backend.fail_from_current_stack.assert_any_call("task-id-1", exc=exc) + backend.fail_from_current_stack.assert_any_call("task-id-2", exc=exc) + + def test_handle_group_chord_error_with_errbacks(self): + """Test _handle_group_chord_error calls error callbacks for each task.""" + errbacks = ["errback1", "errback2"] + b, backend, group_callback, frozen_group, exc = self._setup_group_chord_error_test(errbacks=errbacks) + + # Call the method + b._handle_group_chord_error(group_callback, backend, exc) + + # Verify error callbacks were called + backend._call_task_errbacks.assert_called_once() + call_args = backend._call_task_errbacks.call_args + fake_request = call_args[0][0] + + # Verify fake request was created correctly + assert fake_request.id == "task-id-1" + assert fake_request.errbacks == errbacks + assert fake_request.task == "test.task" + + def test_handle_group_chord_error_cleanup_exception_handling(self): + """Test _handle_group_chord_error handles cleanup exceptions gracefully.""" + b = self.b + backend = Mock() + + exc = ValueError("test error") + + # Mock group callback that raises exception during freeze + group_callback = Mock(spec=group) + group_callback.freeze.side_effect = RuntimeError("freeze failed") + + # Mock fallback behavior + backend.fail_from_current_stack = Mock(return_value="fallback_result") + + # Should not raise exception, but return fallback result + result = b._handle_group_chord_error(group_callback, backend, exc) + + # Verify fallback was called - the method returns an ExceptionInfo when 
cleanup fails + # and falls back to single task handling + assert result is not None # Method returns ExceptionInfo from fail_from_current_stack + + def test_handle_group_chord__exceptions_paths(self, caplog): + """Test _handle_group_chord handles exceptions in various paths.""" + backend = Mock() + + # Mock group callback + group_callback = Mock(spec=group) + group_callback.options = {"link_error": []} + + # Mock frozen group with multiple results + mock_result1 = Mock() + mock_result1.id = "task-id-1" + mock_result2 = Mock() + mock_result2.id = "task-id-2" + + frozen_group = Mock(spec=GroupResult) + frozen_group.results = [mock_result1, mock_result2] + frozen_group.revoke = Mock() + + group_callback.freeze.return_value = frozen_group + + # Test exception during fail_from_current_stack + backend._call_task_errbacks.side_effect = RuntimeError("fail on _call_task_errbacks") + + backend.fail_from_current_stack.side_effect = RuntimeError("fail on fail_from_current_stack") + + _ = self.b._handle_group_chord_error(group_callback, backend, ValueError("test error")) + + assert "Failed to handle chord error for task" in caplog.text + class test_KeyValueStoreBackend: diff --git a/t/unit/backends/test_gcs.py b/t/unit/backends/test_gcs.py index fdb4df692a4..678310c685f 100644 --- a/t/unit/backends/test_gcs.py +++ b/t/unit/backends/test_gcs.py @@ -471,3 +471,119 @@ def test_firestore_document( ) mock_collection.document.assert_called_once_with('test_key') assert result == mock_document + + @patch('celery.backends.gcs.maybe_signature') + @patch.object(GCSBackend, 'incr') + @patch.object(GCSBackend, '_restore_deps') + @patch.object(GCSBackend, '_delete_chord_key') + @patch.object(GCSBackend, 'chord_error_from_stack') + @patch('celery.backends.gcs.allow_join_result') + @patch.object(GCSBackend, '_is_firestore_ttl_policy_enabled') + def test_on_chord_part_return_join_exception( + self, + mock_firestore_ttl, + mock_allow_join_result_, + mock_chord_error_from_stack, + mock_delete_chord_key, + mock_restore_deps, + mock_incr, + mock_maybe_signature, + ): + """Test on_chord_part_return when join_native raises exception.""" + request = MagicMock() + request.group = 'group_id' + request.chord = {'chord_size': 2} + state = MagicMock() + result = MagicMock() + + mock_firestore_ttl.return_value = True + mock_incr.return_value = 2 + + # Mock dependencies and callback + mock_deps = MagicMock() + mock_restore_deps.return_value = mock_deps + mock_callback = MagicMock() + mock_maybe_signature.return_value = mock_callback + + # Make join_native raise an exception + join_exception = ValueError('Join failed') + mock_deps.join_native.side_effect = join_exception + mock_deps._failed_join_report.return_value = iter([]) # No culprit found + + backend = GCSBackend(app=self.app) + backend.on_chord_part_return(request, state, result) + + # Verify chord_error_from_stack was called with the exception + mock_chord_error_from_stack.assert_called_once() + call_args = mock_chord_error_from_stack.call_args + assert call_args[0][0] == mock_callback # callback argument + chord_error_arg = call_args[0][1] # exc argument + assert 'ValueError' in str(chord_error_arg) + assert chord_error_arg.__cause__ == join_exception + + # Verify cleanup still happens + mock_deps.delete.assert_called_once() + mock_delete_chord_key.assert_called_once() + + @patch('celery.backends.gcs.maybe_signature') + @patch.object(GCSBackend, 'incr') + @patch.object(GCSBackend, '_restore_deps') + @patch.object(GCSBackend, '_delete_chord_key') + @patch.object(GCSBackend, 
'chord_error_from_stack') + @patch('celery.backends.gcs.allow_join_result') + @patch.object(GCSBackend, '_is_firestore_ttl_policy_enabled') + def test_on_chord_part_return_callback_exception( + self, + mock_firestore_ttl, + mock_allow_join_result_, + mock_chord_error_from_stack, + mock_delete_chord_key, + mock_restore_deps, + mock_incr, + mock_maybe_signature, + ): + """Test on_chord_part_return when callback.delay raises exception (line 302).""" + request = MagicMock() + request.group = 'group_id' + request.chord = {'chord_size': 2} + state = MagicMock() + result = MagicMock() + + mock_firestore_ttl.return_value = True + mock_incr.return_value = 2 + + # Mock dependencies and callback + mock_deps = MagicMock() + mock_restore_deps.return_value = mock_deps + mock_deps.join_native.return_value = ['result1', 'result2'] + + mock_callback = MagicMock() + mock_maybe_signature.return_value = mock_callback + + # Make callback.delay raise an exception + callback_exception = RuntimeError('Callback failed') + mock_callback.delay.side_effect = callback_exception + + backend = GCSBackend(app=self.app) + backend.on_chord_part_return(request, state, result) + + # Verify join was successful first + mock_deps.join_native.assert_called_once_with( + timeout=self.app.conf.result_chord_join_timeout, + propagate=True, + ) + + # Verify callback.delay was called and failed + mock_callback.delay.assert_called_once_with(['result1', 'result2']) + + # Verify chord_error_from_stack was called with ChordError + mock_chord_error_from_stack.assert_called_once() + call_args = mock_chord_error_from_stack.call_args + assert call_args[0][0] == mock_callback # callback argument + chord_error_arg = call_args[0][1] # exc argument + assert 'Callback error:' in str(chord_error_arg) + assert 'RuntimeError' in str(chord_error_arg) + + # Verify cleanup still happens + mock_deps.delete.assert_called_once() + mock_delete_chord_key.assert_called_once() From a83070e5ec748c32325332db422756cfdd709aae Mon Sep 17 00:00:00 2001 From: Diego Margoni Date: Thu, 3 Jul 2025 05:32:55 +0200 Subject: [PATCH 1031/1051] Fix #9738 : Add root_id and parent_id to .apply() (#9784) * ISSUE-9738: Add root_id and parent_id for .apply() * ISSUE-9738: Flake8 fix * Update celery/app/task.py * tests: (#9738) root_id and parent_id for .apply() --------- Co-authored-by: Asif Saif Uddin --- celery/app/task.py | 10 ++++ t/integration/test_tasks.py | 80 ++++++++++++++++++++++++++ t/unit/tasks/test_tasks.py | 109 ++++++++++++++++++++++++++++++++++++ 3 files changed, 199 insertions(+) diff --git a/celery/app/task.py b/celery/app/task.py index 90ba8552d4f..1688eafd01b 100644 --- a/celery/app/task.py +++ b/celery/app/task.py @@ -790,12 +790,22 @@ def apply(self, args=None, kwargs=None, if throw is None: throw = app.conf.task_eager_propagates + parent_task = _task_stack.top + if parent_task and parent_task.request: + parent_id = parent_task.request.id + root_id = parent_task.request.root_id or task_id + else: + parent_id = None + root_id = task_id + # Make sure we get the task instance, not class. 
task = app._tasks[self.name] request = { 'id': task_id, 'task': self.name, + 'parent_id': parent_id, + 'root_id': root_id, 'retries': retries, 'is_eager': True, 'logfile': logfile, diff --git a/t/integration/test_tasks.py b/t/integration/test_tasks.py index 91bb7ccb4ea..cd2bd25a36b 100644 --- a/t/integration/test_tasks.py +++ b/t/integration/test_tasks.py @@ -496,6 +496,86 @@ def test_soft_time_limit_exceeding_time_limit(self): assert result.status == 'FAILURE' +class test_apply_tasks: + """Tests for tasks called via apply() method.""" + + def test_apply_single_task_ids(self, manager): + """Test that a single task called via apply() has correct IDs.""" + @manager.app.task(bind=True) + def single_apply_task(self): + return { + 'task_id': self.request.id, + 'parent_id': self.request.parent_id, + 'root_id': self.request.root_id, + } + + result = single_apply_task.apply() + data = result.get() + + # Single task should have no parent and root_id should equal task_id + assert data['parent_id'] is None + assert data['root_id'] == data['task_id'] + + def test_apply_nested_parent_child_relationship(self, manager): + """Test parent-child relationship when one task calls another via apply().""" + + @manager.app.task(bind=True) + def grandchild_task(task_self): + return { + 'task_id': task_self.request.id, + 'parent_id': task_self.request.parent_id, + 'root_id': task_self.request.root_id, + 'name': 'grandchild_task' + } + + @manager.app.task(bind=True) + def child_task(task_self): + + # Call grandchild task via apply() + grandchild_data = grandchild_task.apply().get() + return { + 'task_id': task_self.request.id, + 'parent_id': task_self.request.parent_id, + 'root_id': task_self.request.root_id, + 'name': 'child_task', + 'grandchild_data': grandchild_data + } + + @manager.app.task(bind=True) + def parent_task(task_self): + # Call child task via apply() + child_data = child_task.apply().get() + parent_data = { + 'task_id': task_self.request.id, + 'parent_id': task_self.request.parent_id, + 'root_id': task_self.request.root_id, + 'name': 'parent_task', + 'child_data': child_data + } + return parent_data + + result = parent_task.apply() + + parent_data = result.get() + child_data = parent_data['child_data'] + grandchild_data = child_data['grandchild_data'] + + # Verify parent task + assert parent_data['name'] == 'parent_task' + assert parent_data['parent_id'] is None + assert parent_data['root_id'] == parent_data['task_id'] + + # Verify child task + assert child_data['name'] == 'child_task' + assert child_data['parent_id'] == parent_data['task_id'] + assert child_data['root_id'] == parent_data['task_id'] + + # Verify grandchild task + assert grandchild_data['name'] == 'grandchild_task' + assert grandchild_data['parent_id'] == child_data['task_id'] + assert grandchild_data['root_id'] == parent_data['task_id'] + + class test_trace_log_arguments: args = "CUSTOM ARGS" kwargs = "CUSTOM KWARGS" diff --git a/t/unit/tasks/test_tasks.py b/t/unit/tasks/test_tasks.py index 7462313c74f..720394641c8 100644 --- a/t/unit/tasks/test_tasks.py +++ b/t/unit/tasks/test_tasks.py @@ -1503,6 +1503,115 @@ def test_apply_simulates_delivery_info(self): 'priority': 4, } + def test_apply_single_task_ids(self): + """Test that a single task called via apply() has correct IDs.""" + + @self.app.task(bind=True) + def simple_task(task_self): + return { + 'task_id': task_self.request.id, + 'parent_id': task_self.request.parent_id, + 'root_id': task_self.request.root_id, + } + + result = simple_task.apply() + assert isinstance(result, 
EagerResult)
+
+        data = result.get()
+
+        # Single task should have no parent and root_id should equal task_id
+        assert data['parent_id'] is None
+        assert data['root_id'] == data['task_id']
+
+    def test_apply_nested_parent_child_relationship(self):
+        """Test parent-child relationship when one task calls another via apply()."""
+
+        @self.app.task(bind=True)
+        def grandchild_task(task_self):
+            return {
+                'task_id': task_self.request.id,
+                'parent_id': task_self.request.parent_id,
+                'root_id': task_self.request.root_id,
+                'name': 'grandchild_task'
+            }
+
+        @self.app.task(bind=True)
+        def child_task(task_self):
+
+            # Call grandchild task via apply()
+            grandchild_data = grandchild_task.apply().get()
+            return {
+                'task_id': task_self.request.id,
+                'parent_id': task_self.request.parent_id,
+                'root_id': task_self.request.root_id,
+                'name': 'child_task',
+                'grandchild_data': grandchild_data
+            }
+
+        @self.app.task(bind=True)
+        def parent_task(task_self):
+            # Call child task via apply()
+            child_data = child_task.apply().get()
+            parent_data = {
+                'task_id': task_self.request.id,
+                'parent_id': task_self.request.parent_id,
+                'root_id': task_self.request.root_id,
+                'name': 'parent_task',
+                'child_data': child_data
+            }
+            return parent_data
+
+        result = parent_task.apply()
+        assert isinstance(result, EagerResult)
+
+        parent_data = result.get()
+        child_data = parent_data['child_data']
+        grandchild_data = child_data['grandchild_data']
+
+        # Verify parent task
+        assert parent_data['name'] == 'parent_task'
+        assert parent_data['parent_id'] is None
+        assert parent_data['root_id'] == parent_data['task_id']
+
+        # Verify child task
+        assert child_data['name'] == 'child_task'
+        assert child_data['parent_id'] == parent_data['task_id']
+        assert child_data['root_id'] == parent_data['task_id']
+
+        # Verify grandchild task
+        assert grandchild_data['name'] == 'grandchild_task'
+        assert grandchild_data['parent_id'] == child_data['task_id']
+        assert grandchild_data['root_id'] == parent_data['task_id']
+
+    def test_apply_with_parent_task_no_root_id(self):
+        """Test apply() behavior when parent task has no root_id."""
+
+        @self.app.task(bind=True)
+        def test_task(task_self):
+            return {
+                'task_id': task_self.request.id,
+                'parent_id': task_self.request.parent_id,
+                'root_id': task_self.request.root_id,
+            }
+
+        # Create a mock parent task with no root_id
+        mock_parent = Mock()
+        mock_parent.request = Mock(
+            id='parent-id-123',
+            root_id=None,
+            callbacks=[]
+        )
+
+        # Mock _task_stack to return our mock parent
+        with patch('celery.app.task._task_stack') as mock_task_stack:
+            mock_task_stack.top = mock_parent
+            result = test_task.apply()
+            data = result.get()
+
+        # Should use current task_id as root_id when parent has no root_id
+        assert data['parent_id'] == 'parent-id-123'
+        assert data['root_id'] == data['task_id']
+

 class test_apply_async(TasksCase):

     def common_send_task_arguments(self):

From 23521b1db66d538a6f4686a39248a817b74de93a Mon Sep 17 00:00:00 2001
From: Yonatan Bitton
Date: Sat, 5 Jul 2025 08:35:37 +0300
Subject: [PATCH 1032/1051] Replace DelayedDelivery connection creation to use
 context manager (#9793)

* Replace DelayedDelivery connection creation to use context manager

* Fixed failing tests caused by using Mock, which doesn't support the context
manager protocol; replaced those places with MagicMock instead

* Modify test test_start_native_delayed_delivery_topic_exchange to check
connection context was used

---
 celery/worker/consumer/delayed_delivery.py    | 50 +++++++++----------
 t/unit/worker/test_native_delayed_delivery.py |
17 ++++--- 2 files changed, 36 insertions(+), 31 deletions(-) diff --git a/celery/worker/consumer/delayed_delivery.py b/celery/worker/consumer/delayed_delivery.py index d71cd6b56fa..b9d37a12511 100644 --- a/celery/worker/consumer/delayed_delivery.py +++ b/celery/worker/consumer/delayed_delivery.py @@ -114,33 +114,33 @@ def _setup_delayed_delivery(self, c: Consumer, broker_url: str) -> None: OSError: If there are network-related issues Exception: For other unexpected errors during setup """ - connection: Connection = c.app.connection_for_write(url=broker_url) - queue_type = c.app.conf.broker_native_delayed_delivery_queue_type - logger.debug( - "Setting up delayed delivery for broker %r with queue type %r", - broker_url, queue_type - ) - - try: - declare_native_delayed_delivery_exchanges_and_queues( - connection, - queue_type + with c.app.connection_for_write(url=broker_url) as connection: + queue_type = c.app.conf.broker_native_delayed_delivery_queue_type + logger.debug( + "Setting up delayed delivery for broker %r with queue type %r", + broker_url, queue_type ) - except Exception as e: - logger.warning( - "Failed to declare exchanges and queues for %r: %s", - broker_url, str(e) - ) - raise - try: - self._bind_queues(c.app, connection) - except Exception as e: - logger.warning( - "Failed to bind queues for %r: %s", - broker_url, str(e) - ) - raise + try: + declare_native_delayed_delivery_exchanges_and_queues( + connection, + queue_type + ) + except Exception as e: + logger.warning( + "Failed to declare exchanges and queues for %r: %s", + broker_url, str(e) + ) + raise + + try: + self._bind_queues(c.app, connection) + except Exception as e: + logger.warning( + "Failed to bind queues for %r: %s", + broker_url, str(e) + ) + raise def _bind_queues(self, app: Celery, connection: Connection) -> None: """Bind all application queues to delayed delivery exchanges. 
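A minimal sketch of the connection lifecycle this patch adopts (the broker URL
below is illustrative; `connection_for_write` and the context-manager protocol
on the returned connection are the APIs exercised in the diff above):

    from celery import Celery

    app = Celery('sketch', broker='memory://')

    # Entering the block acquires a write connection; leaving it releases
    # the underlying transport resources even if declaring exchanges/queues
    # or binding raises inside the block.
    with app.connection_for_write(url='memory://') as connection:
        connection.ensure_connection(max_retries=1)
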
diff --git a/t/unit/worker/test_native_delayed_delivery.py b/t/unit/worker/test_native_delayed_delivery.py index 63d1950f17e..654d7c15ab7 100644 --- a/t/unit/worker/test_native_delayed_delivery.py +++ b/t/unit/worker/test_native_delayed_delivery.py @@ -1,7 +1,7 @@ import itertools from logging import LogRecord from typing import Iterator -from unittest.mock import Mock, patch +from unittest.mock import MagicMock, Mock, patch import pytest from kombu import Exchange, Queue @@ -28,7 +28,7 @@ def test_include_if_quorum_queues_detected(self, _): assert delayed_delivery.include_if(consumer_mock) is True def test_start_native_delayed_delivery_direct_exchange(self, caplog): - consumer_mock = Mock() + consumer_mock = MagicMock() consumer_mock.app.conf.broker_native_delayed_delivery_queue_type = 'classic' consumer_mock.app.conf.broker_url = 'amqp://' consumer_mock.app.amqp.queues = { @@ -56,15 +56,20 @@ def test_start_native_delayed_delivery_topic_exchange(self, caplog): consumer_mock.app.amqp.queues = { 'celery': Queue('celery', exchange=Exchange('celery', type='topic')) } + connection = MagicMock() + consumer_mock.app.connection_for_write.return_value = connection delayed_delivery = DelayedDelivery(consumer_mock) delayed_delivery.start(consumer_mock) assert len(caplog.records) == 0 + # Verify connection context was called + assert connection.__enter__.called + assert connection.__exit__.called def test_start_native_delayed_delivery_fanout_exchange(self, caplog): - consumer_mock = Mock() + consumer_mock = MagicMock() consumer_mock.app.conf.broker_native_delayed_delivery_queue_type = 'classic' consumer_mock.app.conf.broker_url = 'amqp://' consumer_mock.app.amqp.queues = { @@ -237,7 +242,7 @@ def wrapped_on_retry(exc, interval_range, intervals_count): assert isinstance(value, float), f"Expected float, got {type(value)}" def test_start_with_no_queues(self, caplog): - consumer_mock = Mock() + consumer_mock = MagicMock() consumer_mock.app.conf.broker_native_delayed_delivery_queue_type = 'classic' consumer_mock.app.conf.broker_url = 'amqp://' consumer_mock.app.amqp.queues = {} @@ -264,7 +269,7 @@ def test_start_configuration_validation_error(self, caplog): @patch('celery.worker.consumer.delayed_delivery.declare_native_delayed_delivery_exchanges_and_queues') def test_setup_declare_error(self, mock_declare, caplog): - consumer_mock = Mock() + consumer_mock = MagicMock() consumer_mock.app.conf.broker_native_delayed_delivery_queue_type = 'classic' consumer_mock.app.conf.broker_url = 'amqp://' consumer_mock.app.amqp.queues = { @@ -284,7 +289,7 @@ def test_setup_declare_error(self, mock_declare, caplog): @patch('celery.worker.consumer.delayed_delivery.bind_queue_to_native_delayed_delivery_exchange') def test_setup_bind_error(self, mock_bind, caplog): - consumer_mock = Mock() + consumer_mock = MagicMock() consumer_mock.app.conf.broker_native_delayed_delivery_queue_type = 'classic' consumer_mock.app.conf.broker_url = 'amqp://' consumer_mock.app.amqp.queues = { From 6fca4fb03a29f394d787f11491c2287086626154 Mon Sep 17 00:00:00 2001 From: David Trowbridge Date: Sat, 5 Jul 2025 00:13:04 -0600 Subject: [PATCH 1033/1051] Fix #9794: Pydantic integration fails with __future__.annotations. (#9795) * Fix #9794: Pydantic integration fails with __future__.annotations. When a project uses `from __future__ import annotations`, all annotations will be stored as strings. This is fairly common, and many projects dictate that any use of type annotations must be accompanied by this import. 
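For illustration, a minimal sketch of what postponed evaluation does to
annotations (standard Python behaviour, not code taken from this patch):

    from __future__ import annotations

    import inspect
    import typing

    def add(x: int) -> int:
        return x + x

    # With postponed evaluation the raw annotation is a plain string:
    inspect.signature(add).parameters['x'].annotation  # 'int' (a str)

    # typing.get_type_hints() resolves it back to the real object:
    typing.get_type_hints(add)['x']  # <class 'int'>
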
The Pydantic integration in Celery introspects the annotations to check if any parameters and/or the return type are subclasses of `pydantic.BaseModel`. This fails when the annotations are `str` instead of the actual class. This change fixes the issue by optimistically using `typing.get_type_hints()` instead of relying on the annotations included in the result of `inspect.signature()`. This works in most cases, although there can be cases where `get_type_hints()` fails due to circular import chains. In this case, we fall back to the old implementation. A new test has been added to t/integration/test_tasks.py to validate the issue. * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Asif Saif Uddin --- celery/app/base.py | 21 +++++++++++++++++++-- t/integration/tasks.py | 7 +++++++ t/integration/test_tasks.py | 20 +++++++++++++++++--- 3 files changed, 43 insertions(+), 5 deletions(-) diff --git a/celery/app/base.py b/celery/app/base.py index a4d1c4cd8c9..71ce9329d81 100644 --- a/celery/app/base.py +++ b/celery/app/base.py @@ -124,14 +124,27 @@ def pydantic_wrapper( dump_kwargs = {} dump_kwargs.setdefault('mode', 'json') + # If a file uses `from __future__ import annotations`, all annotations will + # be strings. `typing.get_type_hints()` can turn these back into real + # types, but can also sometimes fail due to circular imports. Try that + # first, and fall back to annotations from `inspect.signature()`. task_signature = inspect.signature(task_fun) + try: + type_hints = typing.get_type_hints(task_fun) + except (NameError, AttributeError, TypeError): + # Fall back to raw annotations from inspect if get_type_hints fails + type_hints = None + @functools.wraps(task_fun) def wrapper(*task_args, **task_kwargs): # Validate task parameters if type hinted as BaseModel bound_args = task_signature.bind(*task_args, **task_kwargs) for arg_name, arg_value in bound_args.arguments.items(): - arg_annotation = task_signature.parameters[arg_name].annotation + if type_hints and arg_name in type_hints: + arg_annotation = type_hints[arg_name] + else: + arg_annotation = task_signature.parameters[arg_name].annotation optional_arg = get_optional_arg(arg_annotation) if optional_arg is not None and arg_value is not None: @@ -149,7 +162,11 @@ def wrapper(*task_args, **task_kwargs): # Dump Pydantic model if the returned value is an instance of pydantic.BaseModel *and* its # class matches the typehint - return_annotation = task_signature.return_annotation + if type_hints and 'return' in type_hints: + return_annotation = type_hints['return'] + else: + return_annotation = task_signature.return_annotation + optional_return_annotation = get_optional_arg(return_annotation) if optional_return_annotation is not None: return_annotation = optional_return_annotation diff --git a/t/integration/tasks.py b/t/integration/tasks.py index 031c89e002e..ff823b96cbc 100644 --- a/t/integration/tasks.py +++ b/t/integration/tasks.py @@ -494,6 +494,13 @@ def add_pydantic(data: AddParameterModel) -> AddResultModel: return AddResultModel(result=value) +@shared_task(pydantic=True) +def add_pydantic_string_annotations(data: "AddParameterModel") -> "AddResultModel": + """Add two numbers, but with string-annotated Pydantic models (__future__.annotations bug).""" + value = data.x + data.y + return AddResultModel(result=value) + + if LEGACY_TASKS_DISABLED: class 
StampOnReplace(StampingVisitor): stamp = {"StampOnReplace": "This is the replaced task"} diff --git a/t/integration/test_tasks.py b/t/integration/test_tasks.py index cd2bd25a36b..1f6a0499018 100644 --- a/t/integration/test_tasks.py +++ b/t/integration/test_tasks.py @@ -16,9 +16,9 @@ from .conftest import TEST_BACKEND, get_active_redis_channels, get_redis_connection from .tasks import (ClassBasedAutoRetryTask, ExpectedException, add, add_ignore_result, add_not_typed, add_pydantic, - fail, fail_unpickleable, print_unicode, retry, retry_once, retry_once_headers, - retry_once_priority, retry_unpickleable, return_properties, second_order_replace1, sleeping, - soft_time_limit_must_exceed_time_limit) + add_pydantic_string_annotations, fail, fail_unpickleable, print_unicode, retry, retry_once, + retry_once_headers, retry_once_priority, retry_unpickleable, return_properties, + second_order_replace1, sleeping, soft_time_limit_must_exceed_time_limit) TIMEOUT = 10 @@ -141,6 +141,20 @@ def test_pydantic_annotations(self, manager): assert result.ready() is True assert result.successful() is True + @flaky + def test_pydantic_string_annotations(self, manager): + """Tests task call with string-annotated Pydantic model.""" + results = [] + # Tests calling task only with args + for i in range(10): + results.append([i + i, add_pydantic_string_annotations.delay({'x': i, 'y': i})]) + for expected, result in results: + value = result.get(timeout=10) + assert value == {'result': expected} + assert result.status == 'SUCCESS' + assert result.ready() is True + assert result.successful() is True + @flaky def test_timeout(self, manager): """Testing timeout of getting results from tasks.""" From 46ccf460a9a21c9488d849ce1a2715038ad1130e Mon Sep 17 00:00:00 2001 From: Darix SAMANI SIEWE Date: Sun, 6 Jul 2025 16:14:13 +0100 Subject: [PATCH 1034/1051] add go and rust implementation in docs (#9800) * add go and rust implementation in docs * Update docs/getting-started/introduction.rst * Update docs/getting-started/introduction.rst * Update docs/getting-started/introduction.rst --------- Co-authored-by: Asif Saif Uddin --- docs/getting-started/introduction.rst | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/docs/getting-started/introduction.rst b/docs/getting-started/introduction.rst index b3d47f3a2b0..a937a6279a9 100644 --- a/docs/getting-started/introduction.rst +++ b/docs/getting-started/introduction.rst @@ -25,15 +25,17 @@ A Celery system can consist of multiple workers and brokers, giving way to high availability and horizontal scaling. Celery is written in Python, but the protocol can be implemented in any -language. In addition to Python there's node-celery_ and node-celery-ts_ for Node.js, -and a `PHP client`_. +language. In addition to Python there's node-celery_ for Node.js, +a `PHP client`_, `gocelery`_, `gopher-celery`_ for Go, and `rusty-celery`_ for Rust. Language interoperability can also be achieved exposing an HTTP endpoint and having a task that requests it (webhooks). -.. _`PHP client`: https://github.com/gjedeer/celery-php .. _node-celery: https://github.com/mher/node-celery -.. _node-celery-ts: https://github.com/IBM/node-celery-ts +.. _`PHP client`: https://github.com/gjedeer/celery-php +.. _`gocelery`: https://github.com/gocelery/gocelery +.. _`gopher-celery`: https://github.com/marselester/gopher-celery +.. _`rusty-celery`: https://github.com/rusty-celery/rusty-celery What do I need? 
=============== From 4c7443d1c36e826d109cf69320cd86ae5be12ad7 Mon Sep 17 00:00:00 2001 From: Jai Date: Mon, 7 Jul 2025 09:38:31 +0530 Subject: [PATCH 1035/1051] Fix memory leak in exception handling (Issue #8882) (#9799) * Fix memory leak in exception handling (Issue #8882) - Enhanced traceback cleanup in celery/app/trace.py to prevent memory leaks - Added proper cleanup of ExceptionInfo objects and traceback references - Optimized traceback_clear() function by removing redundant f_locals access - Added comprehensive memory leak test suite in t/integration/test_memory_leak_8882.py - Fixed code quality issues: removed unused imports, cleaned whitespace, added noqa comments Memory usage improvement: 92% reduction (from ~70MB to ~0.6MB for 500 failing tasks) Addresses reference cycles that prevent garbage collection of traceback frames. All pre-commit hooks passing. * Better file name * Update t/integration/test_memory_leak_8882.py * Update t/integration/test_memory_leak_8882.py * Review changes: Add unit test * Update celery/app/trace.py * Review comments: Clear exceptions * separate the unit and integration tests * Update t/integration/test_mem_leak_in_exception_handling.py * Update t/integration/test_mem_leak_in_exception_handling.py * Update t/integration/test_mem_leak_in_exception_handling.py * Update t/integration/test_mem_leak_in_exception_handling.py * Update celery/app/trace.py * Update t/integration/test_mem_leak_in_exception_handling.py * Update t/integration/test_mem_leak_in_exception_handling.py * Update celery/app/trace.py * Update t/integration/test_mem_leak_in_exception_handling.py * precommit fix --------- Co-authored-by: Asif Saif Uddin --- celery/app/trace.py | 99 +++++-- .../test_mem_leak_in_exception_handling.py | 261 ++++++++++++++++++ t/unit/app/test_trace.py | 134 +++++++++ 3 files changed, 466 insertions(+), 28 deletions(-) create mode 100644 t/integration/test_mem_leak_in_exception_handling.py create mode 100644 t/unit/app/test_trace.py diff --git a/celery/app/trace.py b/celery/app/trace.py index 2e8cf8a3181..b6289709365 100644 --- a/celery/app/trace.py +++ b/celery/app/trace.py @@ -190,6 +190,7 @@ def handle_retry(self, task, req, store_errors=True, **kwargs): # the exception raised is the Retry semi-predicate, # and it's exc' attribute is the original exception raised (if any). type_, _, tb = sys.exc_info() + einfo = None try: reason = self.retval einfo = ExceptionInfo((type_, reason, tb)) @@ -205,39 +206,56 @@ def handle_retry(self, task, req, store_errors=True, **kwargs): 'name': get_task_name(req, task.name), 'exc': str(reason), }) + # MEMORY LEAK FIX: Clear traceback frames to prevent memory retention (Issue #8882) + traceback_clear(einfo.exception) return einfo finally: - del tb + # MEMORY LEAK FIX: Clean up direct traceback reference to prevent + # retention of frame objects and their local variables (Issue #8882) + if tb is not None: + del tb def handle_failure(self, task, req, store_errors=True, call_errbacks=True): """Handle exception.""" orig_exc = self.retval + tb_ref = None - exc = get_pickleable_exception(orig_exc) - if exc.__traceback__ is None: - # `get_pickleable_exception` may have created a new exception without - # a traceback. - _, _, exc.__traceback__ = sys.exc_info() - - exc_type = get_pickleable_etype(type(orig_exc)) - - # make sure we only send pickleable exceptions back to parent. 
- einfo = ExceptionInfo(exc_info=(exc_type, exc, exc.__traceback__)) - - task.backend.mark_as_failure( - req.id, exc, einfo.traceback, - request=req, store_result=store_errors, - call_errbacks=call_errbacks, - ) - - task.on_failure(exc, req.id, req.args, req.kwargs, einfo) - signals.task_failure.send(sender=task, task_id=req.id, - exception=exc, args=req.args, - kwargs=req.kwargs, - traceback=exc.__traceback__, - einfo=einfo) - self._log_error(task, req, einfo) - return einfo + try: + exc = get_pickleable_exception(orig_exc) + if exc.__traceback__ is None: + # `get_pickleable_exception` may have created a new exception without + # a traceback. + _, _, tb_ref = sys.exc_info() + exc.__traceback__ = tb_ref + + exc_type = get_pickleable_etype(type(orig_exc)) + + # make sure we only send pickleable exceptions back to parent. + einfo = ExceptionInfo(exc_info=(exc_type, exc, exc.__traceback__)) + + task.backend.mark_as_failure( + req.id, exc, einfo.traceback, + request=req, store_result=store_errors, + call_errbacks=call_errbacks, + ) + + task.on_failure(exc, req.id, req.args, req.kwargs, einfo) + signals.task_failure.send(sender=task, task_id=req.id, + exception=exc, args=req.args, + kwargs=req.kwargs, + traceback=exc.__traceback__, + einfo=einfo) + self._log_error(task, req, einfo) + # MEMORY LEAK FIX: Clear traceback frames to prevent memory retention (Issue #8882) + traceback_clear(exc) + # Note: We return einfo, so we can't clean it up here + # The calling function is responsible for cleanup + return einfo + finally: + # MEMORY LEAK FIX: Clean up any direct traceback references we may have created + # to prevent retention of frame objects and their local variables (Issue #8882) + if tb_ref is not None: + del tb_ref def _log_error(self, task, req, einfo): eobj = einfo.exception = get_pickled_exception(einfo.exception) @@ -270,6 +288,12 @@ def _log_error(self, task, req, einfo): def traceback_clear(exc=None): + """Clear traceback frames to prevent memory leaks. + + MEMORY LEAK FIX: This function helps break reference cycles between + traceback objects and frame objects that can prevent garbage collection. + Clearing frames releases local variables that may be holding large objects. + """ # Cleared Tb, but einfo still has a reference to Traceback. # exc cleans up the Traceback at the last moment that can be revealed. tb = None @@ -283,8 +307,10 @@ def traceback_clear(exc=None): while tb is not None: try: + # MEMORY LEAK FIX: tb.tb_frame.clear() clears ALL frame data including + # local variables, which is more efficient than accessing f_locals separately. + # Removed redundant tb.tb_frame.f_locals access that was creating unnecessary references. tb.tb_frame.clear() - tb.tb_frame.f_locals except RuntimeError: # Ignore the exception raised if the frame is still executing. 
pass @@ -456,18 +482,22 @@ def trace_task(uuid, args, kwargs, request=None): I, R = Info(REJECTED, exc), ExceptionInfo(internal=True) state, retval = I.state, I.retval I.handle_reject(task, task_request) + # MEMORY LEAK FIX: Clear traceback frames to prevent memory retention (Issue #8882) traceback_clear(exc) except Ignore as exc: I, R = Info(IGNORED, exc), ExceptionInfo(internal=True) state, retval = I.state, I.retval I.handle_ignore(task, task_request) + # MEMORY LEAK FIX: Clear traceback frames to prevent memory retention (Issue #8882) traceback_clear(exc) except Retry as exc: I, R, state, retval = on_error( task_request, exc, RETRY, call_errbacks=False) + # MEMORY LEAK FIX: Clear traceback frames to prevent memory retention (Issue #8882) traceback_clear(exc) except Exception as exc: I, R, state, retval = on_error(task_request, exc) + # MEMORY LEAK FIX: Clear traceback frames to prevent memory retention (Issue #8882) traceback_clear(exc) except BaseException: raise @@ -522,6 +552,8 @@ def trace_task(uuid, args, kwargs, request=None): ) except EncodeError as exc: I, R, state, retval = on_error(task_request, exc) + # MEMORY LEAK FIX: Clear traceback frames to prevent memory retention (Issue #8882) + traceback_clear(exc) else: Rstr = saferepr(R, resultrepr_maxsize) T = monotonic() - time_start @@ -591,6 +623,8 @@ def trace_task(task, uuid, args, kwargs, request=None, **opts): def _signal_internal_error(task, uuid, args, kwargs, request, exc): """Send a special `internal_error` signal to the app for outside body errors.""" + tb = None + einfo = None try: _, _, tb = sys.exc_info() einfo = ExceptionInfo() @@ -607,7 +641,16 @@ def _signal_internal_error(task, uuid, args, kwargs, request, exc): einfo=einfo, ) finally: - del tb + # MEMORY LEAK FIX: Clean up local references to prevent memory leaks (Issue #8882) + # Both 'tb' and 'einfo' can hold references to frame objects and their local variables. + # Explicitly clearing these prevents reference cycles that block garbage collection. + if tb is not None: + del tb + if einfo is not None: + # Clear traceback frames to ensure consistent cleanup + traceback_clear(einfo.exception) + # Break potential reference cycles by deleting the einfo object + del einfo def trace_task_ret(name, uuid, request, body, content_type, diff --git a/t/integration/test_mem_leak_in_exception_handling.py b/t/integration/test_mem_leak_in_exception_handling.py new file mode 100644 index 00000000000..6ec38d0bfc3 --- /dev/null +++ b/t/integration/test_mem_leak_in_exception_handling.py @@ -0,0 +1,261 @@ +""" +Integration tests for memory leak issue #8882. + +These tests reproduce memory leak scenarios that occur when Celery tasks +raise unhandled exceptions, causing ExceptionInfo objects to not be +properly garbage collected. 
+""" + +import gc +import logging +import os +import tracemalloc + +from celery import Celery + +logger = logging.getLogger(__name__) + + +class MemoryLeakUnhandledExceptionsTest: + """Test class for memory leak scenarios with unhandled exceptions.""" + + def __init__(self): + self.app = Celery('test_memory_leak') + self.app.conf.update( + broker_url='memory://', + result_backend='cache+memory://', + task_always_eager=True, + task_eager_propagates=True, + task_store_eager_result=True, + ) + self.setup_tasks() + + def setup_tasks(self): + """Setup test tasks.""" + + @self.app.task + def task_success(): + """Task that completes successfully - baseline for memory comparison.""" + return "success" + + @self.app.task + def task_unhandled_exception(): + """Task that raises an unhandled RuntimeError exception.""" + raise RuntimeError("Unhandled exception for memory leak test") + + @self.app.task(bind=True, max_retries=3) + def task_retry_then_fail(self): + """Task that retries multiple times and eventually fails with unhandled exception.""" + if self.request.retries < self.max_retries: + raise self.retry(countdown=0.001) + raise RuntimeError("Final retry failure - unhandled exception") + + @self.app.task + def task_nested_exception_stack(): + """Task that raises exception through deeply nested function calls.""" + def deep_level_5(): + local_data = {"level": 5, "data": list(range(100))} # noqa: F841 + raise ValueError("Deep nested exception at level 5") + + def deep_level_4(): + local_data = {"level": 4, "nested": {"data": list(range(50))}} # noqa: F841 + deep_level_5() + + def deep_level_3(): + local_data = [1, 2, 3, {"nested": True}] # noqa: F841 + deep_level_4() + + def deep_level_2(): + deep_level_3() + + def deep_level_1(): + deep_level_2() + + deep_level_1() + + self.task_success = task_success + self.task_unhandled_exception = task_unhandled_exception + self.task_retry_then_fail = task_retry_then_fail + self.task_nested_exception_stack = task_nested_exception_stack + + +def get_memory_usage(): + """ + Get current memory usage in bytes. + + Returns RSS (total process memory) if psutil is available, + otherwise returns Python heap allocations via tracemalloc. + Note: These measurements are not directly comparable. 
+ """ + try: + import psutil + process = psutil.Process(os.getpid()) + return process.memory_info().rss + except ImportError: + # Fallback to tracemalloc if psutil not available + current, peak = tracemalloc.get_traced_memory() + return current + + +def test_mem_leak_unhandled_exceptions(): + """Test that reproduces the memory leak when tasks raise unhandled exceptions.""" + + # Setup + test_instance = MemoryLeakUnhandledExceptionsTest() + + # Enable memory tracing + tracemalloc.start() + + # Warm up - run some successful tasks first + for _ in range(50): + try: + test_instance.task_success.apply() + except Exception: + pass + + # Force garbage collection and get baseline memory + gc.collect() + baseline_memory = get_memory_usage() + + # Run many failing tasks - this should demonstrate the leak + exception_count = 0 + for _ in range(500): # Reduced from 1000 to make test faster + try: + test_instance.task_unhandled_exception.apply() + except Exception: + exception_count += 1 + + # Force garbage collection + gc.collect() + after_exceptions_memory = get_memory_usage() + + # Run successful tasks again to ensure the leak is from exceptions + for _ in range(50): + try: + test_instance.task_success.apply() + except Exception: + pass + + gc.collect() + final_memory = get_memory_usage() + + # Calculate memory increase + memory_increase = after_exceptions_memory - baseline_memory + + # Stop tracing + tracemalloc.stop() + + # Log memory statistics for debugging + logger.debug("--- Memory Statistics ---") # Separator for better readability + logger.debug(f"Baseline memory: {baseline_memory / 1024 / 1024:.2f} MB") + logger.debug(f"After exceptions: {after_exceptions_memory / 1024 / 1024:.2f} MB") + logger.debug(f"Final memory: {final_memory / 1024 / 1024:.2f} MB") + logger.debug(f"Memory increase: {memory_increase / 1024 / 1024:.2f} MB") + logger.debug(f"Exceptions processed: {exception_count}") + + # The test should demonstrate a significant memory increase + # This threshold may need adjustment based on the system + memory_increase_mb = memory_increase / 1024 / 1024 + + # Verify the memory leak is fixed - memory increase should be minimal + # Before fix: >70MB for 1000 tasks (~70KB/task) + # After fix: <5MB for 500 tasks (<10KB/task) + threshold_percent = float(os.getenv("MEMORY_LEAK_THRESHOLD_PERCENT", 10)) # Default: 10% increase + memory_threshold_mb = baseline_memory / 1024 / 1024 * (threshold_percent / 100) + assert memory_increase_mb < memory_threshold_mb, ( + f"Memory leak still exists! Expected <{memory_threshold_mb:.2f}MB increase " + f"(based on {threshold_percent}% of baseline), " + f"but got {memory_increase_mb:.2f}MB. " + f"This indicates the memory leak fix is not working properly." 
+ ) + + +def test_mem_leak_retry_failures(): + """Test memory leak with task retry and eventual failure scenarios.""" + + test_instance = MemoryLeakUnhandledExceptionsTest() + + # Enable memory tracing + tracemalloc.start() + + # Get baseline + gc.collect() + baseline_memory = get_memory_usage() + + # Run tasks that retry and eventually fail + for _ in range(100): # Fewer iterations since retries are expensive + try: + test_instance.task_retry_then_fail.apply() + except Exception: + pass + + gc.collect() + after_retries_memory = get_memory_usage() + + # Stop tracing + tracemalloc.stop() + + # Calculate memory increase + memory_increase = after_retries_memory - baseline_memory + memory_increase_mb = memory_increase / 1024 / 1024 + + logger.debug("") # New line for better readability + logger.debug(f"Baseline memory: {baseline_memory / 1024 / 1024:.2f} MB") + logger.debug(f"After retries: {after_retries_memory / 1024 / 1024:.2f} MB") + logger.debug(f"Memory increase: {memory_increase_mb:.2f} MB") + + # Retries should not show significant memory increase if fix is working + assert memory_increase_mb < 3, ( + f"Memory leak in retry scenarios! Expected <3MB increase for 100 retry tasks, " + f"but got {memory_increase_mb:.2f}MB" + ) + + +def test_mem_leak_nested_exception_stacks(): + """Test memory leak with deeply nested exception stacks and local variables.""" + + test_instance = MemoryLeakUnhandledExceptionsTest() + + # Enable memory tracing + tracemalloc.start() + + # Get baseline + gc.collect() + baseline_memory = get_memory_usage() + + # Run tasks with complex exception stacks + for _ in range(200): + try: + test_instance.task_nested_exception_stack.apply() + except Exception: + pass + + gc.collect() + after_complex_memory = get_memory_usage() + + # Stop tracing + tracemalloc.stop() + + # Calculate memory increase + memory_increase = after_complex_memory - baseline_memory + memory_increase_mb = memory_increase / 1024 / 1024 + + logger.debug("Memory usage results:") + logger.debug(f"Baseline memory: {baseline_memory / 1024 / 1024:.2f} MB") + logger.debug(f"After complex exceptions: {after_complex_memory / 1024 / 1024:.2f} MB") + logger.debug(f"Memory increase: {memory_increase_mb:.2f} MB") + + # Complex exceptions should not show significant memory increase if fix is working + assert memory_increase_mb < 4, ( + f"Memory leak in nested exception scenarios! Expected <4MB increase for 200 nested tasks, " + f"but got {memory_increase_mb:.2f}MB" + ) + + +if __name__ == "__main__": + # Allow running these tests standalone for debugging + print("Running memory leak integration tests...") + test_mem_leak_unhandled_exceptions() + test_mem_leak_retry_failures() + test_mem_leak_nested_exception_stacks() + print("Memory leak integration tests completed") diff --git a/t/unit/app/test_trace.py b/t/unit/app/test_trace.py new file mode 100644 index 00000000000..b2796971fdf --- /dev/null +++ b/t/unit/app/test_trace.py @@ -0,0 +1,134 @@ +"""Unit tests for celery.app.trace module.""" + +import sys + +from celery.app.trace import traceback_clear + + +class test_traceback_clear: + """Unit tests for traceback_clear function.""" + + def test_uses_exc_argument(self): + """Test that traceback_clear(exc) correctly uses the exc argument. + + This test proves that the reported issue about traceback_clear not using + the exc argument is NOT valid. The function does use the exc argument correctly. 
+ """ + # Create exception with traceback + def create_exception_with_traceback(): + """Create an exception with a traceback for testing.""" + try: + # Create a nested call stack to have frames to clear + def inner_function(): + x = "some_local_variable" * 1000 # Create local variable # noqa: F841 + y = list(range(1000)) # Another local variable # noqa: F841 + raise ValueError("Test exception with traceback") + + def outer_function(): + z = "outer_local_variable" * 1000 # Local variable in outer frame # noqa: F841 + inner_function() + + outer_function() + except Exception as e: + return e + + # Test 1: traceback_clear(exc) with provided exception + exc = create_exception_with_traceback() + + # Verify exception has traceback + exc_tb = getattr(exc, '__traceback__', None) + assert exc_tb is not None, "Exception should have traceback" + + # Count initial frames + initial_frames = [] + tb = exc_tb + while tb is not None: + initial_frames.append(tb.tb_frame) + tb = tb.tb_next + + assert len(initial_frames) > 0, "Should have traceback frames" + + # Verify frames have local variables before clearing + frame_locals_before = [] + for frame in initial_frames: + frame_locals_before.append(len(frame.f_locals)) + + assert any(count > 0 for count in frame_locals_before), "Frames should have local variables" + + # Call traceback_clear with the exception - this should use exc argument + traceback_clear(exc) + + # Verify frames are cleared + exc_tb_after = getattr(exc, '__traceback__', None) + assert exc_tb_after is not None, "Traceback should still exist after clearing" + + tb = exc_tb_after + frames_after = [] + while tb is not None: + frames_after.append(tb.tb_frame) + tb = tb.tb_next + + # Check that frame locals are cleared + cleared_count = 0 + for frame in frames_after: + if len(frame.f_locals) == 0: + cleared_count += 1 + + assert cleared_count == len(frames_after), "All frames should be cleared" + + # Verify the function actually used the exc argument by checking traceback still exists + assert getattr(exc, '__traceback__', None) is not None, ( + "Traceback should still exist but frames should be cleared" + ) + + def test_without_exc_argument(self): + """Test traceback_clear() without exc argument uses sys.exc_info().""" + try: + def test_function(): + local_var = "test" * 1000 # noqa: F841 + raise RuntimeError("Test exception") + + test_function() + except Exception: + # Now we're in except block with active traceback + _, _, tb_before = sys.exc_info() + assert tb_before is not None, "Should have active traceback" + + # Call traceback_clear without argument - should use sys.exc_info() + traceback_clear() + # Test passes if no exception is raised + + def test_with_none(self): + """Test traceback_clear(None) uses sys.exc_info() fallback.""" + try: + def test_function(): + local_var = "test" * 1000 # noqa: F841 + raise RuntimeError("Test exception") + + test_function() + except Exception: + # Call with None - should fall back to sys.exc_info() + traceback_clear(None) + # Test passes if no exception is raised + + def test_with_exception_no_traceback(self): + """Test traceback_clear with exception that has no __traceback__.""" + # Create exception without traceback + exc = ValueError("Test exception") + + # Should not raise exception + traceback_clear(exc) + + def test_handles_runtime_error(self): + """Test that traceback_clear handles RuntimeError when frame is executing.""" + # This test is mainly for coverage - RuntimeError handling is internal + # and difficult to trigger in normal circumstances + 
try: + def test_function(): + local_var = "test" * 1000 # noqa: F841 + raise RuntimeError("Test exception") + + test_function() + except Exception as exc: + # Should not raise exception even if RuntimeError occurs internally + traceback_clear(exc) From bc2485166dad32d0cfc87549e123ae1af96348ed Mon Sep 17 00:00:00 2001 From: Jai Date: Mon, 7 Jul 2025 09:42:56 +0530 Subject: [PATCH 1036/1051] Fix handlers docs (Issue #9787) (#9804) Co-authored-by: Asif Saif Uddin --- docs/userguide/tasks.rst | 148 ++++++++++++++++++++++++++++++--------- 1 file changed, 116 insertions(+), 32 deletions(-) diff --git a/docs/userguide/tasks.rst b/docs/userguide/tasks.rst index 6d5d605dca6..3dfdbd58093 100644 --- a/docs/userguide/tasks.rst +++ b/docs/userguide/tasks.rst @@ -1577,10 +1577,48 @@ The default value is the class provided by Celery: ``'celery.app.task:Task'``. Handlers -------- +Task handlers are methods that execute at specific points in a task's lifecycle. +All handlers run **synchronously** within the same worker process and thread +that executes the task. + +Execution timeline +~~~~~~~~~~~~~~~~~~ + +The following diagram shows the exact order of execution: + +.. code-block:: text + + Worker Process Timeline + ┌───────────────────────────────────────────────────────────────┐ + │ 1. before_start() ← Blocks until complete │ + │ 2. run() ← Your task function │ + │ 3. [Result Backend] ← State + return value persisted │ + │ 4. on_success() OR ← Outcome-specific handler │ + │ on_retry() OR │ │ + │ on_failure() │ │ + │ 5. after_return() ← Always runs last │ + └───────────────────────────────────────────────────────────────┘ + +.. important:: + + **Key points:** + + - All handlers run in the **same worker process** as your task + - ``before_start`` **blocks** the task - ``run()`` won't start until it completes + - Result backend is updated **before** ``on_success``/``on_failure`` - other clients can see the task as finished while handlers are still running + - ``after_return`` **always** executes, regardless of task outcome + +Available handlers +~~~~~~~~~~~~~~~~~~ + .. method:: before_start(self, task_id, args, kwargs) Run by the worker before the task starts executing. + .. note:: + This handler **blocks** the task: the :py:meth:`run` method will *not* begin + until ``before_start`` returns. + .. versionadded:: 5.2 :param task_id: Unique id of the task to execute. @@ -1589,61 +1627,107 @@ Handlers The return value of this handler is ignored. -.. method:: after_return(self, status, retval, task_id, args, kwargs, einfo) +.. method:: on_success(self, retval, task_id, args, kwargs) - Handler called after the task returns. + Success handler. - :param status: Current task state. - :param retval: Task return value/exception. - :param task_id: Unique id of the task. - :param args: Original arguments for the task that returned. - :param kwargs: Original keyword arguments for the task - that returned. + Run by the worker if the task executes successfully. - :keyword einfo: :class:`~billiard.einfo.ExceptionInfo` - instance, containing the traceback (if any). + .. note:: + Invoked **after** the task result has already been persisted in the + result backend. External clients may observe the task as ``SUCCESS`` + while this handler is still running. + + :param retval: The return value of the task. + :param task_id: Unique id of the executed task. + :param args: Original arguments for the executed task. + :param kwargs: Original keyword arguments for the executed task. + + The return value of this handler is ignored. 
+ +.. method:: on_retry(self, exc, task_id, args, kwargs, einfo) + + Retry handler. + + Run by the worker when the task is to be retried. + + .. note:: + Invoked **after** the task state has been updated to ``RETRY`` in the + result backend but **before** the retry is scheduled. + + :param exc: The exception sent to :meth:`retry`. + :param task_id: Unique id of the retried task. + :param args: Original arguments for the retried task. + :param kwargs: Original keyword arguments for the retried task. + :param einfo: :class:`~billiard.einfo.ExceptionInfo` instance. The return value of this handler is ignored. .. method:: on_failure(self, exc, task_id, args, kwargs, einfo) - This is run by the worker when the task fails. + Failure handler. + + Run by the worker when the task fails. + + .. note:: + Invoked **after** the task result has already been persisted in the + result backend with ``FAILURE`` state. External clients may observe + the task as failed while this handler is still running. :param exc: The exception raised by the task. :param task_id: Unique id of the failed task. - :param args: Original arguments for the task that failed. - :param kwargs: Original keyword arguments for the task - that failed. - - :keyword einfo: :class:`~billiard.einfo.ExceptionInfo` - instance, containing the traceback. + :param args: Original arguments for the failed task. + :param kwargs: Original keyword arguments for the failed task. + :param einfo: :class:`~billiard.einfo.ExceptionInfo` instance. The return value of this handler is ignored. -.. method:: on_retry(self, exc, task_id, args, kwargs, einfo) +.. method:: after_return(self, status, retval, task_id, args, kwargs, einfo) - This is run by the worker when the task is to be retried. + Handler called after the task returns. - :param exc: The exception sent to :meth:`~@Task.retry`. - :param task_id: Unique id of the retried task. - :param args: Original arguments for the retried task. - :param kwargs: Original keyword arguments for the retried task. + .. note:: + Executes **after** ``on_success``/``on_retry``/``on_failure``. This is the + final hook in the task lifecycle and **always** runs, regardless of outcome. - :keyword einfo: :class:`~billiard.einfo.ExceptionInfo` - instance, containing the traceback. + :param status: Current task state. + :param retval: Task return value/exception. + :param task_id: Unique id of the task. + :param args: Original arguments for the task that returned. + :param kwargs: Original keyword arguments for the task that returned. + :param einfo: :class:`~billiard.einfo.ExceptionInfo` instance. The return value of this handler is ignored. -.. method:: on_success(self, retval, task_id, args, kwargs) +Example usage +~~~~~~~~~~~~~ - Run by the worker if the task executes successfully. +.. code-block:: python - :param retval: The return value of the task. - :param task_id: Unique id of the executed task. - :param args: Original arguments for the executed task. - :param kwargs: Original keyword arguments for the executed task. + import time + from celery import Task - The return value of this handler is ignored. 
+ class MyTask(Task): + + def before_start(self, task_id, args, kwargs): + print(f"Task {task_id} starting with args {args}") + # This blocks - run() won't start until this returns + + def on_success(self, retval, task_id, args, kwargs): + print(f"Task {task_id} succeeded with result: {retval}") + # Result is already visible to clients at this point + + def on_failure(self, exc, task_id, args, kwargs, einfo): + print(f"Task {task_id} failed: {exc}") + # Task state is already FAILURE in backend + + def after_return(self, status, retval, task_id, args, kwargs, einfo): + print(f"Task {task_id} finished with status: {status}") + # Always runs last + + @app.task(base=MyTask) + def my_task(x, y): + return x + y .. _task-requests-and-custom-requests: From 1b2583e7a9cbc376410e24fa35ed0eaca73f19d2 Mon Sep 17 00:00:00 2001 From: Colin Watson Date: Mon, 7 Jul 2025 05:26:27 +0100 Subject: [PATCH 1037/1051] Remove importlib_metadata leftovers (#9791) Celery's use of `importlib_metadata` was removed in #9612 (since Celery now requires Python 3.8 which contains `importlib.metadata` in the standard library), but a few conditional imports were left behind. Co-authored-by: Asif Saif Uddin --- celery/bin/celery.py | 6 +----- celery/utils/imports.py | 6 +----- 2 files changed, 2 insertions(+), 10 deletions(-) diff --git a/celery/bin/celery.py b/celery/bin/celery.py index 4ddf9c7fc7a..e1fae1a7761 100644 --- a/celery/bin/celery.py +++ b/celery/bin/celery.py @@ -3,11 +3,7 @@ import pathlib import sys import traceback - -try: - from importlib.metadata import entry_points -except ImportError: - from importlib_metadata import entry_points +from importlib.metadata import entry_points import click import click.exceptions diff --git a/celery/utils/imports.py b/celery/utils/imports.py index 676a4516b8f..da86a58c7ec 100644 --- a/celery/utils/imports.py +++ b/celery/utils/imports.py @@ -4,11 +4,7 @@ import warnings from contextlib import contextmanager from importlib import import_module, reload - -try: - from importlib.metadata import entry_points -except ImportError: - from importlib_metadata import entry_points +from importlib.metadata import entry_points from kombu.utils.imports import symbol_by_name From db72f70aa3a52e513127767a188c1bcf2194067d Mon Sep 17 00:00:00 2001 From: Asif Saif Uddin Date: Mon, 7 Jul 2025 11:08:50 +0600 Subject: [PATCH 1038/1051] Update timeout minutes for smoke tests CI (#9807) --- .github/workflows/python-package.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index fa2532cdb04..473f9b64e35 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -191,7 +191,7 @@ jobs: - name: Run tox for "${{ matrix.python-version }}-smoke-${{ matrix.test-case }}" uses: nick-fields/retry@v3 with: - timeout_minutes: 20 + timeout_minutes: 30 max_attempts: 2 retry_wait_seconds: 0 command: | From bf1c98baba7431ee6a60d5972c5759699473acad Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Asif=20Saif=20Uddin=20=7B=22Auvi=22=3A=22=E0=A6=85?= =?UTF-8?q?=E0=A6=AD=E0=A6=BF=22=7D?= Date: Thu, 10 Jul 2025 12:48:10 +0600 Subject: [PATCH 1039/1051] Revert "Remove dependency on `pycurl` (#9526)" (#9620) This reverts commit 9bf05461dc8de9cb88f4279799e90e1dc0688196. 
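For context on the reinstated pins below: the ``pycurl`` requirement lines use
PEP 508 environment markers, so each pin only applies on matching interpreters.
A minimal sketch of how such markers evaluate, assuming the third-party
``packaging`` library is available (an illustration only, not a dependency
added by this patch):

.. code-block:: python

    # Evaluate the PEP 508 markers used in requirements/extras/sqs.txt.
    # "packaging" is assumed installed here purely for illustration.
    from packaging.markers import Marker

    legacy = Marker(
        'sys_platform != "win32" and '
        'platform_python_implementation == "CPython" and '
        'python_version < "3.9"'
    )
    modern = Marker(
        'sys_platform != "win32" and '
        'platform_python_implementation == "CPython" and '
        'python_version >= "3.9"'
    )

    # evaluate() tests a marker against the current interpreter, so at
    # most one of the two pycurl pins is selected on any given install.
    print("pycurl>=7.43.0.5,<7.45.4 applies:", legacy.evaluate())
    print("pycurl>=7.45.4 applies:", modern.evaluate())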
--- requirements/extras/sqs.txt | 2 ++ requirements/test-ci-default.txt | 3 ++- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/requirements/extras/sqs.txt b/requirements/extras/sqs.txt index a7be017ff2f..78ba57ff78c 100644 --- a/requirements/extras/sqs.txt +++ b/requirements/extras/sqs.txt @@ -1,3 +1,5 @@ boto3>=1.26.143 +pycurl>=7.43.0.5,<7.45.4; sys_platform != 'win32' and platform_python_implementation=="CPython" and python_version < "3.9" +pycurl>=7.45.4; sys_platform != 'win32' and platform_python_implementation=="CPython" and python_version >= "3.9" urllib3>=1.26.16 kombu[sqs]>=5.5.0 diff --git a/requirements/test-ci-default.txt b/requirements/test-ci-default.txt index e689866e245..78994fa8e45 100644 --- a/requirements/test-ci-default.txt +++ b/requirements/test-ci-default.txt @@ -21,4 +21,5 @@ git+https://github.com/celery/kombu.git # SQS dependencies other than boto -urllib3>=1.26.16 +pycurl>=7.43.0.5,<7.45.4; sys_platform != 'win32' and platform_python_implementation=="CPython" and python_version < "3.9" +pycurl>=7.45.4; sys_platform != 'win32' and platform_python_implementation=="CPython" and python_version >= "3.9" From b19cdbb707504af8c8d4f51bab2102d285329516 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Fri, 1 Aug 2025 02:40:02 +0300 Subject: [PATCH 1040/1051] Add Blacksmith Docker layer caching to all Docker builds (#9840) --- .github/workflows/docker.yml | 20 ++++++++++++++++++++ .github/workflows/python-package.yml | 4 ++++ 2 files changed, 24 insertions(+) diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 4f04a34cc2c..a3609aa3eba 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -30,6 +30,10 @@ jobs: timeout-minutes: 60 steps: - uses: actions/checkout@v4 + - name: Setup Blacksmith Docker caching + uses: useblacksmith/build-push-action@v1 + with: + setup-only: true - name: Build Docker container run: make docker-build @@ -38,6 +42,10 @@ jobs: timeout-minutes: 10 steps: - uses: actions/checkout@v4 + - name: Setup Blacksmith Docker caching + uses: useblacksmith/build-push-action@v1 + with: + setup-only: true - name: "Build smoke tests container: dev" run: docker build -f t/smoke/workers/docker/dev . @@ -46,6 +54,10 @@ jobs: timeout-minutes: 10 steps: - uses: actions/checkout@v4 + - name: Setup Blacksmith Docker caching + uses: useblacksmith/build-push-action@v1 + with: + setup-only: true - name: "Build smoke tests container: latest" run: docker build -f t/smoke/workers/docker/pypi . @@ -54,6 +66,10 @@ jobs: timeout-minutes: 10 steps: - uses: actions/checkout@v4 + - name: Setup Blacksmith Docker caching + uses: useblacksmith/build-push-action@v1 + with: + setup-only: true - name: "Build smoke tests container: pypi" run: docker build -f t/smoke/workers/docker/pypi --build-arg CELERY_VERSION="5" . @@ -62,5 +78,9 @@ jobs: timeout-minutes: 10 steps: - uses: actions/checkout@v4 + - name: Setup Blacksmith Docker caching + uses: useblacksmith/build-push-action@v1 + with: + setup-only: true - name: "Build smoke tests container: legacy" run: docker build -f t/smoke/workers/docker/pypi --build-arg CELERY_VERSION="4" . 
diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 473f9b64e35..fbb15b23490 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -177,6 +177,10 @@ jobs: sudo sysctl -w vm.overcommit_memory=1 - uses: actions/checkout@v4 + - name: Setup Blacksmith Docker caching + uses: useblacksmith/build-push-action@v1 + with: + setup-only: true - name: Set up Python ${{ matrix.python-version }} uses: useblacksmith/setup-python@v6 with: From 04337f8bdf127279d5f62a46f08bb690f730b4b2 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Fri, 1 Aug 2025 03:20:04 +0300 Subject: [PATCH 1041/1051] Bump Kombu to v5.6.0b1 (#9839) --- requirements/default.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/default.txt b/requirements/default.txt index fc85b911128..7e4b1ea24bd 100644 --- a/requirements/default.txt +++ b/requirements/default.txt @@ -1,5 +1,5 @@ billiard>=4.2.1,<5.0 -kombu>=5.5.2,<5.6 +kombu>=5.6.0b1,<5.7 vine>=5.1.0,<6.0 click>=8.1.2,<9.0 click-didyoumean>=0.3.0 From 4841c99b27d0ac720805553b3e05be36c0a4f652 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sun, 3 Aug 2025 03:09:14 +0300 Subject: [PATCH 1042/1051] Disable pytest-xdist for smoke tests and increase retries (CI ONLY) (#9842) --- .github/workflows/python-package.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index fbb15b23490..69547c1f5b8 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -195,8 +195,8 @@ jobs: - name: Run tox for "${{ matrix.python-version }}-smoke-${{ matrix.test-case }}" uses: nick-fields/retry@v3 with: - timeout_minutes: 30 - max_attempts: 2 - retry_wait_seconds: 0 + timeout_minutes: 20 + max_attempts: 5 + retry_wait_seconds: 60 command: | - tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -n auto -k ${{ matrix.test-case }} + tox --verbose --verbose -e "${{ matrix.python-version }}-smoke" -- -k ${{ matrix.test-case }} From 46443dc86df23fc2c0aacadbb98ce14160fc58ec Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Sun, 3 Aug 2025 23:02:47 +0300 Subject: [PATCH 1043/1051] Fix Python 3.13 compatibility in events dumper (#9826) Replace deprecated datetime.utcfromtimestamp() with datetime.fromtimestamp() using timezone.utc. The deprecated method was removed in Python 3.12+. Also fix test timezone handling to create proper UTC timestamps and update assertions to expect timezone-aware datetime format. Fixes failing tests: - test_on_event_task_received - test_on_event_non_task --- celery/events/dumper.py | 4 ++-- t/unit/events/test_dumper.py | 10 +++++----- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/celery/events/dumper.py b/celery/events/dumper.py index 24c7b3e9421..08ee12027ca 100644 --- a/celery/events/dumper.py +++ b/celery/events/dumper.py @@ -4,7 +4,7 @@ as they happen. Think of it like a `tcpdump` for Celery events. 
""" import sys -from datetime import datetime +from datetime import datetime, timezone from celery.app import app_or_default from celery.utils.functional import LRUCache @@ -48,7 +48,7 @@ def say(self, msg): pass def on_event(self, ev): - timestamp = datetime.utcfromtimestamp(ev.pop('timestamp')) + timestamp = datetime.fromtimestamp(ev.pop('timestamp'), timezone.utc) type = ev.pop('type').lower() hostname = ev.pop('hostname') if type.startswith('task-'): diff --git a/t/unit/events/test_dumper.py b/t/unit/events/test_dumper.py index e6f8a577e99..eb259db49d3 100644 --- a/t/unit/events/test_dumper.py +++ b/t/unit/events/test_dumper.py @@ -1,5 +1,5 @@ import io -from datetime import datetime +from datetime import datetime, timezone from celery.events import dumper @@ -39,7 +39,7 @@ def test_on_event_task_received(): buf = io.StringIO() d = dumper.Dumper(out=buf) event = { - 'timestamp': datetime(2024, 1, 1, 12, 0, 0).timestamp(), + 'timestamp': datetime(2024, 1, 1, 12, 0, 0, tzinfo=timezone.utc).timestamp(), 'type': 'task-received', 'hostname': 'worker1', 'uuid': 'abc', @@ -49,7 +49,7 @@ def test_on_event_task_received(): } d.on_event(event.copy()) output = buf.getvalue() - assert 'worker1 [2024-01-01 12:00:00]' in output + assert 'worker1 [2024-01-01 12:00:00+00:00]' in output assert 'task received' in output assert 'mytask(abc) args=(1,) kwargs={}' in output @@ -58,13 +58,13 @@ def test_on_event_non_task(): buf = io.StringIO() d = dumper.Dumper(out=buf) event = { - 'timestamp': datetime(2024, 1, 1, 12, 0, 0).timestamp(), + 'timestamp': datetime(2024, 1, 1, 12, 0, 0, tzinfo=timezone.utc).timestamp(), 'type': 'worker-online', 'hostname': 'worker1', 'foo': 'bar', } d.on_event(event.copy()) output = buf.getvalue() - assert 'worker1 [2024-01-01 12:00:00]' in output + assert 'worker1 [2024-01-01 12:00:00+00:00]' in output assert 'started' in output assert 'foo=bar' in output From a8ec7fafe15200fd84ab41b6289bf169338cc6d9 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Mon, 4 Aug 2025 06:31:33 +0300 Subject: [PATCH 1044/1051] Dockerfile Build Optimizations (#9733) * Dockerfile Build Optimizations * Update docker/Dockerfile * Review Fixes --------- Co-authored-by: Asif Saif Uddin --- Makefile | 2 +- docker/Dockerfile | 172 +++++++++++++++++++++++++--------------------- 2 files changed, 96 insertions(+), 78 deletions(-) diff --git a/Makefile b/Makefile index d28ac57dcf7..6e2eb420942 100644 --- a/Makefile +++ b/Makefile @@ -168,7 +168,7 @@ authorcheck: .PHONY: docker-build docker-build: - @docker compose -f docker/docker-compose.yml build + @DOCKER_BUILDKIT=1 docker compose -f docker/docker-compose.yml build .PHONY: docker-lint docker-lint: diff --git a/docker/Dockerfile b/docker/Dockerfile index 479613ac51f..36817c1d1cc 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -1,7 +1,7 @@ FROM debian:bookworm-slim -ENV PYTHONUNBUFFERED 1 -ENV PYTHONIOENCODING UTF-8 +ENV PYTHONUNBUFFERED=1 +ENV PYTHONIOENCODING=UTF-8 ARG DEBIAN_FRONTEND=noninteractive @@ -37,9 +37,10 @@ RUN apt-get update && apt-get install -y build-essential \ # Setup variables. Even though changing these may cause unnecessary invalidation of # unrelated elements, grouping them together makes the Dockerfile read better. 
-ENV PROVISIONING /provisioning +ENV PROVISIONING=/provisioning ENV PIP_NO_CACHE_DIR=off ENV PYTHONDONTWRITEBYTECODE=1 +ENV PIP_PREFER_BINARY=1 ARG CELERY_USER=developer @@ -47,7 +48,7 @@ ARG CELERY_USER=developer # Check for mandatory build arguments RUN : "${CELERY_USER:?CELERY_USER build argument needs to be set and non-empty.}" -ENV HOME /home/$CELERY_USER +ENV HOME=/home/$CELERY_USER ENV PATH="$HOME/.pyenv/bin:$PATH" # Copy and run setup scripts @@ -65,13 +66,13 @@ USER $CELERY_USER RUN curl https://pyenv.run | bash # Install required Python versions -RUN pyenv install 3.13 -RUN pyenv install 3.12 -RUN pyenv install 3.11 -RUN pyenv install 3.10 -RUN pyenv install 3.9 -RUN pyenv install 3.8 -RUN pyenv install pypy3.10 +RUN pyenv install 3.13 && \ + pyenv install 3.12 && \ + pyenv install 3.11 && \ + pyenv install 3.10 && \ + pyenv install 3.9 && \ + pyenv install 3.8 && \ + pyenv install pypy3.10 # Set global Python versions @@ -86,7 +87,8 @@ RUN chmod gu+x /entrypoint # Define the local pyenvs RUN pyenv local 3.13 3.12 3.11 3.10 3.9 3.8 pypy3.10 -RUN pyenv exec python3.13 -m pip install --upgrade pip setuptools wheel && \ +RUN --mount=type=cache,target=/home/$CELERY_USER/.cache/pip \ + pyenv exec python3.13 -m pip install --upgrade pip setuptools wheel && \ pyenv exec python3.12 -m pip install --upgrade pip setuptools wheel && \ pyenv exec python3.11 -m pip install --upgrade pip setuptools wheel && \ pyenv exec python3.10 -m pip install --upgrade pip setuptools wheel && \ @@ -94,18 +96,76 @@ RUN pyenv exec python3.13 -m pip install --upgrade pip setuptools wheel && \ pyenv exec python3.8 -m pip install --upgrade pip setuptools wheel && \ pyenv exec pypy3.10 -m pip install --upgrade pip setuptools wheel -COPY --chown=1000:1000 . $HOME/celery +# Install requirements first to leverage Docker layer caching +# Split into separate RUN commands to reduce memory pressure and improve layer caching +RUN --mount=type=cache,target=/home/$CELERY_USER/.cache/pip \ + pyenv exec python3.13 -m pip install -r requirements/default.txt \ + -r requirements/dev.txt \ + -r requirements/docs.txt \ + -r requirements/pkgutils.txt \ + -r requirements/test-ci-base.txt \ + -r requirements/test-ci-default.txt \ + -r requirements/test-integration.txt \ + -r requirements/test-pypy3.txt \ + -r requirements/test.txt + +RUN --mount=type=cache,target=/home/$CELERY_USER/.cache/pip \ + pyenv exec python3.12 -m pip install -r requirements/default.txt \ + -r requirements/dev.txt \ + -r requirements/docs.txt \ + -r requirements/pkgutils.txt \ + -r requirements/test-ci-base.txt \ + -r requirements/test-ci-default.txt \ + -r requirements/test-integration.txt \ + -r requirements/test-pypy3.txt \ + -r requirements/test.txt + +RUN --mount=type=cache,target=/home/$CELERY_USER/.cache/pip \ + pyenv exec python3.11 -m pip install -r requirements/default.txt \ + -r requirements/dev.txt \ + -r requirements/docs.txt \ + -r requirements/pkgutils.txt \ + -r requirements/test-ci-base.txt \ + -r requirements/test-ci-default.txt \ + -r requirements/test-integration.txt \ + -r requirements/test-pypy3.txt \ + -r requirements/test.txt + +RUN --mount=type=cache,target=/home/$CELERY_USER/.cache/pip \ + pyenv exec python3.10 -m pip install -r requirements/default.txt \ + -r requirements/dev.txt \ + -r requirements/docs.txt \ + -r requirements/pkgutils.txt \ + -r requirements/test-ci-base.txt \ + -r requirements/test-ci-default.txt \ + -r requirements/test-integration.txt \ + -r requirements/test-pypy3.txt \ + -r requirements/test.txt -RUN pyenv 
exec python3.13 -m pip install -e $HOME/celery && \ - pyenv exec python3.12 -m pip install -e $HOME/celery && \ - pyenv exec python3.11 -m pip install -e $HOME/celery && \ - pyenv exec python3.10 -m pip install -e $HOME/celery && \ - pyenv exec python3.9 -m pip install -e $HOME/celery && \ - pyenv exec python3.8 -m pip install -e $HOME/celery && \ - pyenv exec pypy3.10 -m pip install -e $HOME/celery +RUN --mount=type=cache,target=/home/$CELERY_USER/.cache/pip \ + pyenv exec python3.9 -m pip install -r requirements/default.txt \ + -r requirements/dev.txt \ + -r requirements/docs.txt \ + -r requirements/pkgutils.txt \ + -r requirements/test-ci-base.txt \ + -r requirements/test-ci-default.txt \ + -r requirements/test-integration.txt \ + -r requirements/test-pypy3.txt \ + -r requirements/test.txt + +RUN --mount=type=cache,target=/home/$CELERY_USER/.cache/pip \ + pyenv exec python3.8 -m pip install -r requirements/default.txt \ + -r requirements/dev.txt \ + -r requirements/docs.txt \ + -r requirements/pkgutils.txt \ + -r requirements/test-ci-base.txt \ + -r requirements/test-ci-default.txt \ + -r requirements/test-integration.txt \ + -r requirements/test-pypy3.txt \ + -r requirements/test.txt -# Setup one celery environment for basic development use -RUN pyenv exec python3.13 -m pip install -r requirements/default.txt \ +RUN --mount=type=cache,target=/home/$CELERY_USER/.cache/pip \ + pyenv exec pypy3.10 -m pip install -r requirements/default.txt \ -r requirements/dev.txt \ -r requirements/docs.txt \ -r requirements/pkgutils.txt \ @@ -113,61 +173,19 @@ RUN pyenv exec python3.13 -m pip install -r requirements/default.txt \ -r requirements/test-ci-default.txt \ -r requirements/test-integration.txt \ -r requirements/test-pypy3.txt \ - -r requirements/test.txt && \ - pyenv exec python3.12 -m pip install -r requirements/default.txt \ - -r requirements/dev.txt \ - -r requirements/docs.txt \ - -r requirements/pkgutils.txt \ - -r requirements/test-ci-base.txt \ - -r requirements/test-ci-default.txt \ - -r requirements/test-integration.txt \ - -r requirements/test-pypy3.txt \ - -r requirements/test.txt && \ - pyenv exec python3.11 -m pip install -r requirements/default.txt \ - -r requirements/dev.txt \ - -r requirements/docs.txt \ - -r requirements/pkgutils.txt \ - -r requirements/test-ci-base.txt \ - -r requirements/test-ci-default.txt \ - -r requirements/test-integration.txt \ - -r requirements/test-pypy3.txt \ - -r requirements/test.txt && \ - pyenv exec python3.10 -m pip install -r requirements/default.txt \ - -r requirements/dev.txt \ - -r requirements/docs.txt \ - -r requirements/pkgutils.txt \ - -r requirements/test-ci-base.txt \ - -r requirements/test-ci-default.txt \ - -r requirements/test-integration.txt \ - -r requirements/test-pypy3.txt \ - -r requirements/test.txt && \ - pyenv exec python3.9 -m pip install -r requirements/default.txt \ - -r requirements/dev.txt \ - -r requirements/docs.txt \ - -r requirements/pkgutils.txt \ - -r requirements/test-ci-base.txt \ - -r requirements/test-ci-default.txt \ - -r requirements/test-integration.txt \ - -r requirements/test-pypy3.txt \ - -r requirements/test.txt && \ - pyenv exec python3.8 -m pip install -r requirements/default.txt \ - -r requirements/dev.txt \ - -r requirements/docs.txt \ - -r requirements/pkgutils.txt \ - -r requirements/test-ci-base.txt \ - -r requirements/test-ci-default.txt \ - -r requirements/test-integration.txt \ - -r requirements/test-pypy3.txt \ - -r requirements/test.txt && \ - pyenv exec pypy3.10 -m pip install -r 
requirements/default.txt \ - -r requirements/dev.txt \ - -r requirements/docs.txt \ - -r requirements/pkgutils.txt \ - -r requirements/test-ci-base.txt \ - -r requirements/test-ci-default.txt \ - -r requirements/test-integration.txt \ - -r requirements/test-pypy3.txt \ - -r requirements/test.txt + -r requirements/test.txt + +COPY --chown=1000:1000 . $HOME/celery + +# Install celery in editable mode (dependencies already installed above) +RUN --mount=type=cache,target=/home/$CELERY_USER/.cache/pip \ + pyenv exec python3.13 -m pip install --no-deps -e $HOME/celery && \ + pyenv exec python3.12 -m pip install --no-deps -e $HOME/celery && \ + pyenv exec python3.11 -m pip install --no-deps -e $HOME/celery && \ + pyenv exec python3.10 -m pip install --no-deps -e $HOME/celery && \ + pyenv exec python3.9 -m pip install --no-deps -e $HOME/celery && \ + pyenv exec python3.8 -m pip install --no-deps -e $HOME/celery && \ + pyenv exec pypy3.10 -m pip install --no-deps -e $HOME/celery WORKDIR $HOME/celery From 6dcecbe52da8717c015203f5e0f6b8d684b6ccc9 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 5 Aug 2025 13:15:11 +0300 Subject: [PATCH 1045/1051] Migrated from useblacksmith/build-push-action@v1 to useblacksmith/setup-docker-builder@v1 in the CI (#9846) --- .github/workflows/docker.yml | 30 ++++++++++------------------ .github/workflows/python-package.yml | 6 ++---- 2 files changed, 12 insertions(+), 24 deletions(-) diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index a3609aa3eba..a6cd26fbcd7 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -30,10 +30,8 @@ jobs: timeout-minutes: 60 steps: - uses: actions/checkout@v4 - - name: Setup Blacksmith Docker caching - uses: useblacksmith/build-push-action@v1 - with: - setup-only: true + - name: Setup Docker Builder + uses: useblacksmith/setup-docker-builder@v1 - name: Build Docker container run: make docker-build @@ -42,10 +40,8 @@ jobs: timeout-minutes: 10 steps: - uses: actions/checkout@v4 - - name: Setup Blacksmith Docker caching - uses: useblacksmith/build-push-action@v1 - with: - setup-only: true + - name: Setup Docker Builder + uses: useblacksmith/setup-docker-builder@v1 - name: "Build smoke tests container: dev" run: docker build -f t/smoke/workers/docker/dev . @@ -54,10 +50,8 @@ jobs: timeout-minutes: 10 steps: - uses: actions/checkout@v4 - - name: Setup Blacksmith Docker caching - uses: useblacksmith/build-push-action@v1 - with: - setup-only: true + - name: Setup Docker Builder + uses: useblacksmith/setup-docker-builder@v1 - name: "Build smoke tests container: latest" run: docker build -f t/smoke/workers/docker/pypi . @@ -66,10 +60,8 @@ jobs: timeout-minutes: 10 steps: - uses: actions/checkout@v4 - - name: Setup Blacksmith Docker caching - uses: useblacksmith/build-push-action@v1 - with: - setup-only: true + - name: Setup Docker Builder + uses: useblacksmith/setup-docker-builder@v1 - name: "Build smoke tests container: pypi" run: docker build -f t/smoke/workers/docker/pypi --build-arg CELERY_VERSION="5" . @@ -78,9 +70,7 @@ jobs: timeout-minutes: 10 steps: - uses: actions/checkout@v4 - - name: Setup Blacksmith Docker caching - uses: useblacksmith/build-push-action@v1 - with: - setup-only: true + - name: Setup Docker Builder + uses: useblacksmith/setup-docker-builder@v1 - name: "Build smoke tests container: legacy" run: docker build -f t/smoke/workers/docker/pypi --build-arg CELERY_VERSION="4" . 
diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 69547c1f5b8..44a215a5efb 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -177,10 +177,8 @@ jobs: sudo sysctl -w vm.overcommit_memory=1 - uses: actions/checkout@v4 - - name: Setup Blacksmith Docker caching - uses: useblacksmith/build-push-action@v1 - with: - setup-only: true + - name: Setup Docker Builder + uses: useblacksmith/setup-docker-builder@v1 - name: Set up Python ${{ matrix.python-version }} uses: useblacksmith/setup-python@v6 with: From c2b4ad1b6c3a3ce601fd2c6dd5ce5cad084a10ce Mon Sep 17 00:00:00 2001 From: YuppY Date: Fri, 8 Aug 2025 23:14:16 +0500 Subject: [PATCH 1046/1051] Remove incorrect example pytest-celery is a plugin with a different API, this page is about celery.contrib.pytest plugin. --- docs/userguide/testing.rst | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/docs/userguide/testing.rst b/docs/userguide/testing.rst index 5b2a5761818..1a7f353830c 100644 --- a/docs/userguide/testing.rst +++ b/docs/userguide/testing.rst @@ -121,10 +121,9 @@ use in your integration (or unit) test suites. Enabling -------- -Celery initially ships the plugin in a disabled state, to enable it you can either: +Celery initially ships the plugin in a disabled state. To enable it, you can either: * ``pip install celery[pytest]`` - * ``pip install pytest-celery`` * or add an environment variable ``PYTEST_PLUGINS=celery.contrib.pytest`` * or add ``pytest_plugins = ("celery.contrib.pytest", )`` to your root conftest.py From da4a80dc449301fde4355153b47af8c42caed37c Mon Sep 17 00:00:00 2001 From: Dan LaManna Date: Sun, 10 Aug 2025 03:19:25 -0400 Subject: [PATCH 1047/1051] Revert "Use Django DB max age connection setting" (#9824) * Revert "Use Django DB max age connection setting" This reverts commit f0c9b40bd4aa7228afa20f589e50f2e4225d804e. This reverts PR #6134 and stops using the close_if_unusable_or_obsolete API since there are edge cases where it's unable to detect if a connection if actually unusable. This is most obvious when Celery interrupts a query in progress via a time limit handler. Django has marked this issue as wontfix (https://code.djangoproject.com/ticket/30646). Since this is effectively an optimization for Celery that can't be reliably used, Celery ought to close the connection after each task instead of trying to manage connections in a way similar to how the Django application does. * Ensure django fixup never calls close_if_unusable_or_obsolete This API can fail to close unusable connections in certain scenarios, namely database failovers and ungraceful terminations (e.g. signal handler for time limit exceeded tasks). This makes close_if_unusable_or_obsolete adequate for HTTP request lifecycle management but inappropriate for use within celery workers. 
See also: https://code.djangoproject.com/ticket/30646 https://forum.djangoproject.com/t/close-if-unusable-or-obsolete-fails-to-close-unusable-connections/41900 * Add test for close_cache --- celery/fixups/django.py | 9 +++----- t/unit/fixups/test_django.py | 41 +++++++++++++++--------------------- 2 files changed, 20 insertions(+), 30 deletions(-) diff --git a/celery/fixups/django.py b/celery/fixups/django.py index b35499493a6..960077704e4 100644 --- a/celery/fixups/django.py +++ b/celery/fixups/django.py @@ -168,7 +168,7 @@ def on_worker_process_init(self, **kwargs: Any) -> None: self._maybe_close_db_fd(c) # use the _ version to avoid DB_REUSE preventing the conn.close() call - self._close_database(force=True) + self._close_database() self.close_cache() def _maybe_close_db_fd(self, c: "BaseDatabaseWrapper") -> None: @@ -197,13 +197,10 @@ def close_database(self, **kwargs: Any) -> None: self._close_database() self._db_recycles += 1 - def _close_database(self, force: bool = False) -> None: + def _close_database(self) -> None: for conn in self._db.connections.all(): try: - if force: - conn.close() - else: - conn.close_if_unusable_or_obsolete() + conn.close() except self.interface_errors: pass except self.DatabaseError as exc: diff --git a/t/unit/fixups/test_django.py b/t/unit/fixups/test_django.py index c09ba61642c..0d6ab1d83b3 100644 --- a/t/unit/fixups/test_django.py +++ b/t/unit/fixups/test_django.py @@ -196,7 +196,7 @@ def test_on_worker_process_init(self, patching): f.on_worker_process_init() mcf.assert_called_with(conns[1].connection) f.close_cache.assert_called_with() - f._close_database.assert_called_with(force=True) + f._close_database.assert_called_with() f.validate_models = Mock(name='validate_models') patching.setenv('FORKED_BY_MULTIPROCESSING', '1') @@ -264,38 +264,31 @@ def test__close_database(self): f._db.connections = Mock() # ConnectionHandler f._db.connections.all.side_effect = lambda: conns - f._close_database(force=True) + f._close_database() conns[0].close.assert_called_with() - conns[0].close_if_unusable_or_obsolete.assert_not_called() conns[1].close.assert_called_with() - conns[1].close_if_unusable_or_obsolete.assert_not_called() conns[2].close.assert_called_with() - conns[2].close_if_unusable_or_obsolete.assert_not_called() - - for conn in conns: - conn.reset_mock() - - f._close_database() - conns[0].close.assert_not_called() - conns[0].close_if_unusable_or_obsolete.assert_called_with() - conns[1].close.assert_not_called() - conns[1].close_if_unusable_or_obsolete.assert_called_with() - conns[2].close.assert_not_called() - conns[2].close_if_unusable_or_obsolete.assert_called_with() conns[1].close.side_effect = KeyError( 'omg') - f._close_database() - with pytest.raises(KeyError): - f._close_database(force=True) - - conns[1].close.side_effect = None - conns[1].close_if_unusable_or_obsolete.side_effect = KeyError( - 'omg') - f._close_database(force=True) with pytest.raises(KeyError): f._close_database() + def test_close_database_always_closes_connections(self): + with self.fixup_context(self.app) as (f, _, _): + conn = Mock() + f._db.connections.all = Mock(return_value=[conn]) + f.close_database() + conn.close.assert_called_once_with() + # close_if_unusable_or_obsolete is not safe to call in all conditions, so avoid using + # it to optimize connection handling. 
+ conn.close_if_unusable_or_obsolete.assert_not_called() + + def test_close_cache_raises_error(self): + with self.fixup_context(self.app) as (f, _, _): + f._cache.close_caches.side_effect = AttributeError + f.close_cache() + def test_close_cache(self): with self.fixup_context(self.app) as (f, _, _): f.close_cache() From 7adc9e6afc132c5ced1678fb7b8ed09a8a68f07a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?So=C3=B3s=20Tam=C3=A1s?= <39013301+tsoos99dev@users.noreply.github.com> Date: Mon, 11 Aug 2025 09:28:54 +0200 Subject: [PATCH 1048/1051] Fix pending_result memory leak (#9806) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Add call to remove_pending_result, to counter add_pending_result in then. * Add unittest for checking if remove_pending_result is called after a call to forget. --------- Co-authored-by: Asif Saif Uddin {"Auvi":"অভি"} --- celery/result.py | 2 ++ t/smoke/tests/test_canvas.py | 12 ++++++++++++ t/unit/tasks/test_result.py | 14 ++++++++++++++ 3 files changed, 28 insertions(+) diff --git a/celery/result.py b/celery/result.py index 75512c5aadb..66a9e20aab8 100644 --- a/celery/result.py +++ b/celery/result.py @@ -137,6 +137,8 @@ def forget(self): self._cache = None if self.parent: self.parent.forget() + + self.backend.remove_pending_result(self) self.backend.forget(self.id) def revoke(self, connection=None, terminate=False, signal=None, diff --git a/t/smoke/tests/test_canvas.py b/t/smoke/tests/test_canvas.py index e0886d56e49..b6c69e76397 100644 --- a/t/smoke/tests/test_canvas.py +++ b/t/smoke/tests/test_canvas.py @@ -179,3 +179,15 @@ def test_chord_error_propagation_with_different_body_types( # The chord should fail with the expected exception from the failing task with pytest.raises(ExpectedException): result.get(timeout=RESULT_TIMEOUT) + + +class test_complex_workflow: + def test_pending_tasks_released_on_forget(self, celery_setup: CeleryTestSetup): + sig = add.si(1, 1) | group( + add.s(1) | group(add.si(1, 1), add.si(2, 2)) | add.si(2, 2), + add.s(1) | group(add.si(1, 1), add.si(2, 2)) | add.si(2, 2) + ) | add.si(1, 1) + res = sig.apply_async(queue=celery_setup.worker.worker_queue) + assert not all(len(mapping) == 0 for mapping in res.backend._pending_results) + res.forget() + assert all(len(mapping) == 0 for mapping in res.backend._pending_results) diff --git a/t/unit/tasks/test_result.py b/t/unit/tasks/test_result.py index 062c0695427..d5aaa481926 100644 --- a/t/unit/tasks/test_result.py +++ b/t/unit/tasks/test_result.py @@ -449,6 +449,20 @@ def test_date_done(self, utc_datetime_mock, timezone, date): result = Backend(app=self.app)._get_result_meta(None, states.SUCCESS, None, None) assert result.get('date_done') == date + def test_forget_remove_pending_result(self): + with patch('celery.result.AsyncResult.backend') as backend: + result = self.app.AsyncResult(self.task1['id']) + result.backend = backend + result_clone = copy.copy(result) + result.forget() + backend.remove_pending_result.assert_called_once_with( + result_clone + ) + + result = self.app.AsyncResult(self.task1['id']) + result.backend = None + del result + class test_ResultSet: From f4e2cf8138bcf8cb272d76216169001fd29566ef Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Asif=20Saif=20Uddin=20=7B=22Auvi=22=3A=22=E0=A6=85?= =?UTF-8?q?=E0=A6=AD=E0=A6=BF=22=7D?= Date: Tue, 12 Aug 2025 13:00:11 +0600 Subject: [PATCH 1049/1051] Update python-package.yml (#9856) --- .github/workflows/python-package.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git 
a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 44a215a5efb..09f046aed55 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -52,7 +52,7 @@ jobs: if: startsWith(matrix.os, 'blacksmith-4vcpu-ubuntu') run: | sudo apt-get update && sudo apt-get install -f libcurl4-openssl-dev libssl-dev libgnutls28-dev httping expect libmemcached-dev - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Set up Python ${{ matrix.python-version }} uses: useblacksmith/setup-python@v6 with: @@ -117,7 +117,7 @@ jobs: run: | sudo apt-get update && sudo apt-get install -f libcurl4-openssl-dev libssl-dev libgnutls28-dev httping expect libmemcached-dev - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Set up Python ${{ matrix.python-version }} uses: useblacksmith/setup-python@v6 with: @@ -176,7 +176,7 @@ jobs: sudo apt-get install -y procps # Install procps to enable sysctl sudo sysctl -w vm.overcommit_memory=1 - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Setup Docker Builder uses: useblacksmith/setup-docker-builder@v1 - name: Set up Python ${{ matrix.python-version }} From e906aae8d3e2956ff4f64047e29a1f58610a18fc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Aug 2025 13:07:01 +0600 Subject: [PATCH 1050/1051] Bump actions/checkout from 4 to 5 (#9857) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [actions/checkout](https://github.com/actions/checkout) from 4 to 5. - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/v4...v5) --- updated-dependencies: - dependency-name: actions/checkout dependency-version: '5' dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: Asif Saif Uddin {"Auvi":"অভি"} --- .github/workflows/codeql-analysis.yml | 2 +- .github/workflows/docker.yml | 10 +++++----- .github/workflows/linter.yml | 2 +- .github/workflows/semgrep.yml | 2 +- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 72078f37760..c4372c0848b 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -39,7 +39,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v4 + uses: actions/checkout@v5 # Initializes the CodeQL tools for scanning. 
- name: Initialize CodeQL diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index a6cd26fbcd7..d91264cf842 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -29,7 +29,7 @@ jobs: runs-on: blacksmith-4vcpu-ubuntu-2204 timeout-minutes: 60 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Setup Docker Builder uses: useblacksmith/setup-docker-builder@v1 - name: Build Docker container @@ -39,7 +39,7 @@ jobs: runs-on: blacksmith-4vcpu-ubuntu-2204 timeout-minutes: 10 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Setup Docker Builder uses: useblacksmith/setup-docker-builder@v1 - name: "Build smoke tests container: dev" @@ -49,7 +49,7 @@ jobs: runs-on: blacksmith-4vcpu-ubuntu-2204 timeout-minutes: 10 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Setup Docker Builder uses: useblacksmith/setup-docker-builder@v1 - name: "Build smoke tests container: latest" @@ -59,7 +59,7 @@ jobs: runs-on: blacksmith-4vcpu-ubuntu-2204 timeout-minutes: 10 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Setup Docker Builder uses: useblacksmith/setup-docker-builder@v1 - name: "Build smoke tests container: pypi" @@ -69,7 +69,7 @@ jobs: runs-on: blacksmith-4vcpu-ubuntu-2204 timeout-minutes: 10 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Setup Docker Builder uses: useblacksmith/setup-docker-builder@v1 - name: "Build smoke tests container: legacy" diff --git a/.github/workflows/linter.yml b/.github/workflows/linter.yml index 98a05f2b3a4..6f22274e9b7 100644 --- a/.github/workflows/linter.yml +++ b/.github/workflows/linter.yml @@ -8,7 +8,7 @@ jobs: steps: - name: Checkout branch - uses: actions/checkout@v4 + uses: actions/checkout@v5 - name: Run pre-commit uses: pre-commit/action@v3.0.1 diff --git a/.github/workflows/semgrep.yml b/.github/workflows/semgrep.yml index 9078d214ff2..c33b7514c85 100644 --- a/.github/workflows/semgrep.yml +++ b/.github/workflows/semgrep.yml @@ -21,5 +21,5 @@ jobs: container: image: returntocorp/semgrep steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - run: semgrep ci From 33eb14852310996b1909c8388cd319809d6c8626 Mon Sep 17 00:00:00 2001 From: Tomer Nosrati Date: Tue, 12 Aug 2025 14:22:22 +0300 Subject: [PATCH 1051/1051] Bump Kombu to v5.6.0b2 (#9858) --- requirements/default.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/default.txt b/requirements/default.txt index 7e4b1ea24bd..015541462aa 100644 --- a/requirements/default.txt +++ b/requirements/default.txt @@ -1,5 +1,5 @@ billiard>=4.2.1,<5.0 -kombu>=5.6.0b1,<5.7 +kombu>=5.6.0b2,<5.7 vine>=5.1.0,<6.0 click>=8.1.2,<9.0 click-didyoumean>=0.3.0
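
With the default requirements now pinning ``kombu>=5.6.0b2,<5.7``, a quick
environment sanity check can be built on the standard library's
``importlib.metadata`` (the same module the earlier cleanup commit in this
series standardized on). This sketch additionally assumes the third-party
``packaging`` library for the specifier check; it is illustrative, not part
of any patch above:

.. code-block:: python

    # Verify the installed kombu satisfies the new default.txt pin.
    from importlib.metadata import version

    from packaging.specifiers import SpecifierSet
    from packaging.version import Version

    installed = Version(version("kombu"))
    pin = SpecifierSet(">=5.6.0b2,<5.7")

    # Pre-releases such as 5.6.0b2 only match when explicitly allowed.
    ok = pin.contains(installed, prereleases=True)
    print(f"kombu {installed} satisfies '{pin}': {ok}")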