diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
new file mode 100644
index 0000000..4324616
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -0,0 +1,20 @@
+---
+name: Bug report
+about: Create a report to help us improve
+title: "[BUG]"
+labels: ''
+assignees: ''
+
+---
+
+**Describe the bug**
+A clear and concise description of what the bug is.
+
+**How to reproduce**
+Describe how to reproduce the behavior.
+
+**Expected behavior**
+A clear and concise description of what you expected to happen.
+
+**Additional context**
+Add any other context about the problem here.
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
new file mode 100644
index 0000000..e0c0168
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/feature_request.md
@@ -0,0 +1,20 @@
+---
+name: Feature request
+about: Suggest an idea for this project
+title: "[FEATURE]"
+labels: ''
+assignees: ''
+
+---
+
+**Is your feature request related to a problem? Please describe.**
+A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
+
+**Describe the solution you'd like**
+A clear and concise description of what you want to happen.
+
+**Describe alternatives you've considered**
+A clear and concise description of any alternative solutions or features you've considered.
+
+**Additional context**
+Add any other context or screenshots about the feature request here.
diff --git a/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md b/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md
new file mode 100644
index 0000000..f7266d2
--- /dev/null
+++ b/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md
@@ -0,0 +1,15 @@
+## Description
+Describe your changes or fixes (please link to an issue if applicable)
+
+## Types of changes
+- [ ] Breaking change (fix or feature that would cause existing functionality to change)
+- [ ] New feature (non-breaking change which adds functionality)
+- [ ] Bug fix (non-breaking change which fixes an issue)
+- [ ] Refactoring (improvements in base code)
+- [ ] Add test (adds test coverage to functionality)
+
+## Checklist
+- [ ] Automated tests
+- [ ] Extends CHANGELOG.md
+- [ ] Requires migrations?
+- [ ] Requires dependency update?
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 1a5d562..a9b88f4 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -13,7 +13,6 @@ jobs:
       fail-fast: false
       matrix:
         python-version:
-          - "3.8"
           - "3.9"
           - "3.10"
           - "3.11"
@@ -21,6 +20,7 @@
         celery-version:
           - "5.2"
           - "5.3"
+          - "5.4"
         exclude:
           - python-version: "3.12"
             celery-version: "5.2"
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 0000000..4ce655d
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,27 @@
+repos:
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v5.0.0
+    hooks:
+      - id: check-merge-conflict
+      - id: end-of-file-fixer
+      - id: requirements-txt-fixer
+      - id: trailing-whitespace
+        args: ["--markdown-linebreak-ext=md"]
+
+  - repo: https://github.com/asottile/pyupgrade
+    rev: v3.17.0
+    hooks:
+      - id: pyupgrade
+        args: ["--py39-plus"]
+
+  - repo: https://github.com/asottile/add-trailing-comma
+    rev: v3.1.0
+    hooks:
+      - id: add-trailing-comma
+
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    rev: v0.6.9
+    hooks:
+      - id: ruff
+        args: [ --fix ]
+      - id: ruff-format
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 33cda34..4626380 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -6,6 +6,13 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
 ## [Unreleased]
 
+## [1.2.0] - 2024-10-25
+### Added
+- Added support for Celery 5.4
+
+### Removed
+- Removed support for Python 3.8
+
 ## [1.1.0] - 2023-11-03
 ### Added
 - Added support for Python 3.11 and Python 3.12
@@ -22,6 +29,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 ### Added
 - AMQP result backend for Celery
 
-[Unreleased]: https://github.com/anexia/celery-amqp-backend/compare/1.1.0...HEAD
+[Unreleased]: https://github.com/anexia/celery-amqp-backend/compare/1.2.0...HEAD
+[1.2.0]: https://github.com/anexia/celery-amqp-backend/releases/tag/1.2.0
 [1.1.0]: https://github.com/anexia/celery-amqp-backend/releases/tag/1.1.0
 [1.0.0]: https://github.com/anexia/celery-amqp-backend/releases/tag/1.0.0
diff --git a/LICENSE b/LICENSE
index 7ed0a54..ac355b1 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,6 +1,6 @@
 The MIT License (MIT)
 
-Copyright (c) 2021 ANEXIA Internetdienstleisungs GmbH
+Copyright (c) 2024 ANEXIA Internetdienstleisungs GmbH
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
@@ -19,4 +19,3 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 SOFTWARE.
- diff --git a/README.md b/README.md index 19a6b5b..02eac85 100644 --- a/README.md +++ b/README.md @@ -68,13 +68,13 @@ result_exchange_type = 'direct' # Supported versions -| | Celery 5.2 | Celery 5.3 | -|-------------|------------|------------| -| Python 3.8 | ✓ | ✓ | -| Python 3.9 | ✓ | ✓ | -| Python 3.10 | ✓ | ✓ | -| Python 3.11 | ✓ | ✓ | -| Python 3.12 | ✗ | ✓ | +| | Celery 5.2 | Celery 5.3 | Celery 5.4 | +|-------------|------------|------------|------------| +| Python 3.9 | ✓ | ✓ | ✓ | +| Python 3.10 | ✓ | ✓ | ✓ | +| Python 3.11 | ✓ | ✓ | ✓ | +| Python 3.12 | ✗ | ✓ | ✓ | +| Python 3.13 | ✗ | ✗ | ✗ | # List of developers diff --git a/celery_amqp_backend/backend.py b/celery_amqp_backend/backend.py index a576e1b..6bd83f8 100644 --- a/celery_amqp_backend/backend.py +++ b/celery_amqp_backend/backend.py @@ -9,7 +9,7 @@ __all__ = [ - 'AMQPBackend', + "AMQPBackend", ] @@ -18,6 +18,7 @@ class AMQPBackend(base.BaseBackend): Celery result backend that creates a temporary queue for each result of a task. This backend is more or less a re-implementation for the old AMQP result backend. """ + READY_STATES = states.READY_STATES PROPAGATE_STATES = states.PROPAGATE_STATES @@ -35,22 +36,32 @@ class AMQPBackend(base.BaseBackend): supports_native_join = True retry_policy = { - 'max_retries': 20, - 'interval_start': 0, - 'interval_step': 1, - 'interval_max': 1, + "max_retries": 20, + "interval_start": 0, + "interval_step": 1, + "interval_max": 1, } - def __init__(self, app, exchange=None, exchange_type=None, persistent=None, serializer=None, auto_delete=True, - **kwargs): + def __init__( + self, + app, + exchange=None, + exchange_type=None, + persistent=None, + serializer=None, + auto_delete=True, + **kwargs, + ): super().__init__(app, **kwargs) conf = self.app.conf self.persistent = self.prepare_persistent(persistent) self.delivery_mode = 2 if self.persistent else 1 - self.result_exchange = exchange or conf.result_exchange or 'celery_result' - self.result_exchange_type = exchange_type or conf.result_exchange_type or 'direct' + self.result_exchange = exchange or conf.result_exchange or "celery_result" + self.result_exchange_type = ( + exchange_type or conf.result_exchange_type or "direct" + ) self.exchange = self._create_exchange( self.result_exchange, self.result_exchange_type, @@ -59,7 +70,15 @@ def __init__(self, app, exchange=None, exchange_type=None, persistent=None, seri self.serializer = serializer or conf.result_serializer self.auto_delete = auto_delete - def store_result(self, task_id, result, state, traceback=None, request=None, **kwargs): + def store_result( + self, + task_id, + result, + state, + traceback=None, + request=None, + **kwargs, + ): """ Sends the task result for the given task identifier to the task result queue and returns the sent result dict. @@ -73,16 +92,19 @@ def store_result(self, task_id, result, state, traceback=None, request=None, **k """ # Determine the routing key and a potential correlation identifier. We use the task identifier as # correlation identifier as a fallback. 
- routing_key, correlation_id = self._create_routing_key(task_id), request and request.correlation_id or task_id + routing_key, correlation_id = ( + self._create_routing_key(task_id), + request and request.correlation_id or task_id, + ) with self.app.amqp.producer_pool.acquire(block=True) as producer: producer.publish( { - 'task_id': task_id, - 'status': state, - 'result': self.encode_result(result, state), - 'traceback': traceback, - 'children': self.current_task_children(request), + "task_id": task_id, + "status": state, + "result": self.encode_result(result, state), + "traceback": traceback, + "children": self.current_task_children(request), }, exchange=self.exchange, routing_key=routing_key, @@ -98,7 +120,16 @@ def store_result(self, task_id, result, state, traceback=None, request=None, **k return result - def wait_for(self, task_id, timeout=None, cache=True, no_ack=True, on_message=None, on_interval=None, **kwargs): + def wait_for( + self, + task_id, + timeout=None, + cache=True, + no_ack=True, + on_message=None, + on_interval=None, + **kwargs, + ): """ Gets a single task result from the queue. The messages may come from the cache or may be consumed from the queue itself. This method returns the task result data as a dict and may raise an exception if a result @@ -114,19 +145,28 @@ def wait_for(self, task_id, timeout=None, cache=True, no_ack=True, on_message=No :return: Task result body as dict """ for fetched_task_id, fetched_task_result in self.get_many( - [ - task_id, - ], - timeout=timeout, - no_ack=no_ack, - cache=cache, - on_interval=on_interval, + [ + task_id, + ], + timeout=timeout, + no_ack=no_ack, + cache=cache, + on_interval=on_interval, ): return fetched_task_result raise self.WaitEmptyException(task=task_id) - def get_many(self, task_ids, timeout=None, no_ack=True, cache=True, on_message=None, on_interval=None, **kwargs): + def get_many( + self, + task_ids, + timeout=None, + no_ack=True, + cache=True, + on_message=None, + on_interval=None, + **kwargs, + ): """ Gets multiple task results from the queue. The messages may come from the cache or may be consumed from the queue itself. This method returns an iterator for tuples of task identifier and task results and may raise @@ -150,7 +190,10 @@ def get_many(self, task_ids, timeout=None, no_ack=True, cache=True, on_message=N if cache: for task_id in task_ids: cached_task_result = get_cached(task_id) - if cached_task_result and cached_task_result['status'] in self.READY_STATES: + if ( + cached_task_result + and cached_task_result["status"] in self.READY_STATES + ): yield task_id, cached_task_result mark_cached(task_id) @@ -187,7 +230,10 @@ def on_message_callback(message): if on_message is not None: on_message(received_task_result) - received_task_state, received_task_id = received_task_result['status'], received_task_result['task_id'] + received_task_state, received_task_id = ( + received_task_result["status"], + received_task_result["task_id"], + ) # If the task result is ready, we push it to the result cache. If the task result is als a result # for a task we are looking for, we push the result to the `results` collection to yield it afterwards. @@ -200,7 +246,13 @@ def on_message_callback(message): # Create the queue bindings for the tasks we want the results for. 
bindings = self._create_many_bindings(task_ids) - with self.Consumer(channel, bindings, on_message=on_message_callback, accept=self.accept, no_ack=no_ack): + with self.Consumer( + channel, + bindings, + on_message=on_message_callback, + accept=self.accept, + no_ack=no_ack, + ): # Drain task results from the bindings as long as there are tasks left whose result we did # not yield yet. while task_ids: @@ -212,7 +264,7 @@ def on_message_callback(message): while results: task_result = next_task_result() - task_id = task_result['task_id'] + task_id = task_result["task_id"] task_ids.discard(task_id) yield task_id, task_result @@ -243,7 +295,8 @@ def get_task_meta(self, task_id, backlog_limit=1000): for i in range(backlog_limit): # Get a pending message from the queue. current = binding.get( - accept=self.accept, no_ack=False, + accept=self.accept, + no_ack=False, ) # If there are no more pending messages on the queue, we have found the latest message and can @@ -254,7 +307,7 @@ def get_task_meta(self, task_id, backlog_limit=1000): # We make sure that the task result message we got is for the task we are interested in. As we declare # a separate result queue for each task, there should not be any messages for other tasks, but better # be safe than sorry. - if current.payload['task_id'] == task_id: + if current.payload["task_id"] == task_id: prev, latest = latest, current if prev: @@ -277,8 +330,8 @@ def get_task_meta(self, task_id, backlog_limit=1000): return self._cache[task_id] except KeyError: return { - 'status': states.PENDING, - 'result': None, + "status": states.PENDING, + "result": None, } def as_uri(self, include_password=True): @@ -288,33 +341,37 @@ def as_uri(self, include_password=True): :param include_password: Include passwords for the backend (unused for this backend) :return: Backend URL representation """ - return '%s.%s://' % ( + return "{}.{}://".format( self.__class__.__module__, self.__class__.__qualname__, ) def reload_task_result(self, task_id): - raise NotImplementedError('reload_task_result is not supported by this backend.') + raise NotImplementedError( + "reload_task_result is not supported by this backend.", + ) def reload_group_result(self, task_id): - raise NotImplementedError('reload_group_result is not supported by this backend.') + raise NotImplementedError( + "reload_group_result is not supported by this backend.", + ) def save_group(self, group_id, result): - raise NotImplementedError('save_group is not supported by this backend.') + raise NotImplementedError("save_group is not supported by this backend.") def restore_group(self, group_id, cache=True): - raise NotImplementedError('restore_group is not supported by this backend.') + raise NotImplementedError("restore_group is not supported by this backend.") def delete_group(self, group_id): - raise NotImplementedError('delete_group is not supported by this backend.') + raise NotImplementedError("delete_group is not supported by this backend.") def add_to_chord(self, chord_id, result): - raise NotImplementedError('add_to_chord is not supported by this backend.') + raise NotImplementedError("add_to_chord is not supported by this backend.") def _forget(self, task_id): pass - def _create_exchange(self, name, exchange_type='direct', delivery_mode=2): + def _create_exchange(self, name, exchange_type="direct", delivery_mode=2): """ Creates an exchange with the given parameters. 
@@ -365,7 +422,7 @@ def _create_routing_key(self, task_id): :param task_id: Task identifier as string :return: Routing key as string """ - return f'{self.result_exchange}.{task_id}' + return f"{self.result_exchange}.{task_id}" def __reduce__(self, args=(), kwargs=None): kwargs = kwargs if kwargs else {} diff --git a/celery_amqp_backend/exceptions.py b/celery_amqp_backend/exceptions.py index f5387e9..0c9b544 100644 --- a/celery_amqp_backend/exceptions.py +++ b/celery_amqp_backend/exceptions.py @@ -1,9 +1,9 @@ import celery.exceptions __all__ = [ - 'AMQPBacklogLimitExceededException', - 'AMQPWaitEmptyException', - 'AMQPWaitTimeoutException', + "AMQPBacklogLimitExceededException", + "AMQPWaitEmptyException", + "AMQPWaitTimeoutException", ] @@ -11,6 +11,7 @@ class BaseCeleryException(Exception): """ Base class for all celery related exceptions within the application. """ + _msg_template = None def __init__(self, *args, internal_exception=None, **kwargs): @@ -28,4 +29,4 @@ class AMQPWaitEmptyException(BaseCeleryException): class AMQPWaitTimeoutException(BaseCeleryException, celery.exceptions.TimeoutError): - _msg_template = 'The operation timed out.' + _msg_template = "The operation timed out." diff --git a/pyproject.toml b/pyproject.toml index f6c1689..36beabb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,9 @@ [build-system] requires = [ - "setuptools>=42", - "wheel", + "setuptools>=75", + "wheel>=0.44", ] build-backend = "setuptools.build_meta" + +[tool.ruff] +ignore = ["F403", "F405"] diff --git a/requirements.txt b/requirements.txt index 6e5e43e..c90c821 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,11 +1,11 @@ # Package and package dependencies -e . +codecov>=2.1,<2.2 +django>=4.2,<6 +flake8>=7.1.0,<7.2.0 # Development dependencies -pytest>=7.4,<7.5 -flake8>=6.1.0,<6.2.0 -codecov>=2.1,<2.2 -django>=4.2,<4.3 -setuptools>=42 -wheel>=0.37 -twine>=3.4 +pytest>=8.3,<8.4 +setuptools>=75 +twine>=5.1 +wheel>=0.44 diff --git a/setup.py b/setup.py index 6ee7f96..65fb554 100644 --- a/setup.py +++ b/setup.py @@ -2,40 +2,39 @@ from setuptools import find_packages, setup -with open(os.path.join(os.path.dirname(__file__), 'README.md')) as fh: +with open(os.path.join(os.path.dirname(__file__), "README.md")) as fh: readme = fh.read() # allow setup.py to be run from any path os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir))) setup( - name='celery-amqp-backend', - version=os.getenv('PACKAGE_VERSION', '0.0.0').replace('refs/tags/', ''), + name="celery-amqp-backend", + version=os.getenv("PACKAGE_VERSION", "0.0.0").replace("refs/tags/", ""), packages=find_packages(), include_package_data=True, - license='MIT', - description='A rewrite of the original Celery AMQP result backend that supports Celery 5.0 and newer.', + license="MIT", + description="A rewrite of the original Celery AMQP result backend that supports Celery 5.0 and newer.", long_description=readme, - long_description_content_type='text/markdown', - url='https://github.com/anexia/celery-amqp-backend', - author='Andreas Stocker', - author_email='AStocker@anexia-it.com', + long_description_content_type="text/markdown", + url="https://github.com/anexia/celery-amqp-backend", + author="Andreas Stocker", + author_email="AStocker@anexia-it.com", install_requires=[ - 'celery>=5.2,<6.0', + "celery>=5.2,<6.0", ], classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'License :: OSI Approved :: MIT License', - 'Operating System :: OS Independent', - 
'Programming Language :: Python', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', - 'Programming Language :: Python :: 3.10', - 'Programming Language :: Python :: 3.11', - 'Programming Language :: Python :: 3.12', - 'Framework :: Celery', - 'Topic :: Software Development', + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Operating System :: OS Independent", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Framework :: Celery", + "Topic :: Software Development", ], ) diff --git a/tests/test_project/manage.py b/tests/test_project/manage.py index ca475ea..cd7781b 100755 --- a/tests/test_project/manage.py +++ b/tests/test_project/manage.py @@ -5,17 +5,17 @@ def main(): """Run administrative tasks.""" - os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'test_project.settings') + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "test_project.settings") try: from django.core.management import execute_from_command_line except ImportError as exc: raise ImportError( "Couldn't import Django. Are you sure it's installed and " "available on your PYTHONPATH environment variable? Did you " - "forget to activate a virtual environment?" + "forget to activate a virtual environment?", ) from exc execute_from_command_line(sys.argv) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/tests/test_project/test_project/__init__.py b/tests/test_project/test_project/__init__.py index a1a2207..3ecbc23 100644 --- a/tests/test_project/test_project/__init__.py +++ b/tests/test_project/test_project/__init__.py @@ -2,5 +2,5 @@ __all__ = [ - 'celery_app', + "celery_app", ] diff --git a/tests/test_project/test_project/celery.py b/tests/test_project/test_project/celery.py index 513a579..cfcd6a2 100644 --- a/tests/test_project/test_project/celery.py +++ b/tests/test_project/test_project/celery.py @@ -1,17 +1,17 @@ from celery import Celery __all__ = [ - 'app', + "app", ] -app = Celery('test_project') +app = Celery("test_project") # Using a string here means the worker doesn't have to serialize # the configuration object to child processes. # - namespace='CELERY' means all celery-related configuration keys # should have a `CELERY_` prefix. -app.config_from_object('django.conf:settings', namespace='CELERY') +app.config_from_object("django.conf:settings", namespace="CELERY") # Load task modules from all registered Django app configs. app.autodiscover_tasks() diff --git a/tests/test_project/test_project/settings.py b/tests/test_project/test_project/settings.py index 63f8e0b..fc348cb 100644 --- a/tests/test_project/test_project/settings.py +++ b/tests/test_project/test_project/settings.py @@ -8,7 +8,7 @@ # See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! -SECRET_KEY = 'django-insecure-7db3wq7w6q@g3h^al9&ji=una44&ba+i#pz=@q=$+#&cx6%@$g' +SECRET_KEY = "django-insecure-7db3wq7w6q@g3h^al9&ji=una44&ba+i#pz=@q=$+#&cx6%@$g" # SECURITY WARNING: don't run with debug turned on in production! 
DEBUG = True @@ -19,40 +19,40 @@ # Application definition INSTALLED_APPS = [ - 'test_project.tests', + "test_project.tests", ] MIDDLEWARE = [] -ROOT_URLCONF = 'test_project.urls' +ROOT_URLCONF = "test_project.urls" TEMPLATES = [ { - 'BACKEND': 'django.template.backends.django.DjangoTemplates', - 'DIRS': [], - 'APP_DIRS': True, - 'OPTIONS': { - 'context_processors': [ - 'django.template.context_processors.debug', - 'django.template.context_processors.request', - 'django.contrib.auth.context_processors.auth', - 'django.contrib.messages.context_processors.messages', + "BACKEND": "django.template.backends.django.DjangoTemplates", + "DIRS": [], + "APP_DIRS": True, + "OPTIONS": { + "context_processors": [ + "django.template.context_processors.debug", + "django.template.context_processors.request", + "django.contrib.auth.context_processors.auth", + "django.contrib.messages.context_processors.messages", ], }, }, ] -WSGI_APPLICATION = 'test_project.wsgi.application' +WSGI_APPLICATION = "test_project.wsgi.application" # Database # https://docs.djangoproject.com/en/3.2/ref/settings/#databases DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.sqlite3', - 'NAME': BASE_DIR / 'db.sqlite3', - } + "default": { + "ENGINE": "django.db.backends.sqlite3", + "NAME": BASE_DIR / "db.sqlite3", + }, } @@ -65,9 +65,9 @@ # Internationalization # https://docs.djangoproject.com/en/3.2/topics/i18n/ -LANGUAGE_CODE = 'en-gb' +LANGUAGE_CODE = "en-gb" -TIME_ZONE = 'UTC' +TIME_ZONE = "UTC" USE_I18N = True @@ -79,28 +79,28 @@ # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/3.2/howto/static-files/ -STATIC_URL = '/static/' +STATIC_URL = "/static/" # Default primary key field type # https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field -DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField' +DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField" # Celery Setup # http://docs.celeryproject.org/en/latest/django/first-steps-with-django.html -CELERY_BROKER_URL = 'amqp://127.0.0.1/' -CELERY_RESULT_BACKEND = 'celery_amqp_backend.AMQPBackend://' +CELERY_BROKER_URL = "amqp://127.0.0.1/" +CELERY_RESULT_BACKEND = "celery_amqp_backend.AMQPBackend://" CELERY_BROKER_POOL_LIMIT = 10 -CELERY_TASK_DEFAULT_QUEUE = 'tests.default' -CELERY_EVENT_QUEUE_PREFIX = 'tests.event' -CELERY_RESULT_EXCHANGE = 'tests.result' -CELERY_EVENT_EXCHANGE = 'tests.event' -CELERY_CONTROL_EXCHANGE = 'tests.control' +CELERY_TASK_DEFAULT_QUEUE = "tests.default" +CELERY_EVENT_QUEUE_PREFIX = "tests.event" +CELERY_RESULT_EXCHANGE = "tests.result" +CELERY_EVENT_EXCHANGE = "tests.event" +CELERY_CONTROL_EXCHANGE = "tests.control" CELERY_ACCEPT_CONTENT = [ - 'json', + "json", ] -CELERY_TASK_DEFAULT_DELIVERY_MODE = 'transient' +CELERY_TASK_DEFAULT_DELIVERY_MODE = "transient" CELERY_RESULT_EXPIRES = 60 CELERY_RESULT_PERSISTENT = False CELERY_TASK_ACKS_LATE = False diff --git a/tests/test_project/test_project/tests/__init__.py b/tests/test_project/test_project/tests/__init__.py index d50bd68..d2665cf 100644 --- a/tests/test_project/test_project/tests/__init__.py +++ b/tests/test_project/test_project/tests/__init__.py @@ -1,5 +1,5 @@ __all__ = [ - 'default_app_config', + "default_app_config", ] -default_app_config = 'test_project.tests.apps.TestsConfig' +default_app_config = "test_project.tests.apps.TestsConfig" diff --git a/tests/test_project/test_project/tests/apps.py b/tests/test_project/test_project/tests/apps.py index 5631c4d..e00e792 100644 --- a/tests/test_project/test_project/tests/apps.py +++ 
b/tests/test_project/test_project/tests/apps.py @@ -1,9 +1,9 @@ from django.apps import AppConfig __all__ = [ - 'TestsConfig', + "TestsConfig", ] class TestsConfig(AppConfig): - name = 'test_project.tests' + name = "test_project.tests" diff --git a/tests/test_project/test_project/tests/tasks.py b/tests/test_project/test_project/tests/tasks.py index 5018318..724dd94 100644 --- a/tests/test_project/test_project/tests/tasks.py +++ b/tests/test_project/test_project/tests/tasks.py @@ -3,11 +3,11 @@ from test_project import * __all__ = [ - 'sum_numbers', - 'add_numbers', - 'add_numbers_slow', - 'combine_lists', - 'combine_dicts', + "sum_numbers", + "add_numbers", + "add_numbers_slow", + "combine_lists", + "combine_dicts", ] diff --git a/tests/test_project/test_project/tests/tests/base.py b/tests/test_project/test_project/tests/tests/base.py index ccdbdd6..ce7153f 100644 --- a/tests/test_project/test_project/tests/tests/base.py +++ b/tests/test_project/test_project/tests/tests/base.py @@ -8,7 +8,7 @@ from test_project import * __all__ = [ - 'BaseIntegrationTestCase', + "BaseIntegrationTestCase", ] @@ -16,28 +16,34 @@ class BaseIntegrationTestCase(test.TransactionTestCase): """ Base class for integration tests using api calls and the celery queue. """ + worker_controller = None worker_context = None @staticmethod def _get_celery_routes(): - celery_routes = getattr(settings, 'CELERY_TASK_ROUTES', {}) - celery_queues = list(set([ - r.get('queue', settings.CELERY_TASK_DEFAULT_QUEUE) - for r in celery_routes.values() - ] + [settings.CELERY_TASK_DEFAULT_QUEUE, ])) + celery_routes = getattr(settings, "CELERY_TASK_ROUTES", {}) + celery_queues = list( + set( + [ + r.get("queue", settings.CELERY_TASK_DEFAULT_QUEUE) + for r in celery_routes.values() + ] + + [settings.CELERY_TASK_DEFAULT_QUEUE], + ), + ) return celery_queues @classmethod def setUpClass(cls): celery_app.conf.task_reject_on_worker_lost = True - celery_app.loader.import_module('celery.contrib.testing.tasks') + celery_app.loader.import_module("celery.contrib.testing.tasks") worker.setup_app_for_worker(celery_app, None, None) cls.worker_context = worker.start_worker( celery_app, - pool='solo', + pool="solo", perform_ping_check=False, queues=cls._get_celery_routes(), ) diff --git a/tests/test_project/test_project/tests/tests/test_backend.py b/tests/test_project/test_project/tests/tests/test_backend.py index f953e5d..fbb7188 100644 --- a/tests/test_project/test_project/tests/tests/test_backend.py +++ b/tests/test_project/test_project/tests/tests/test_backend.py @@ -8,7 +8,7 @@ from .base import * __all__ = [ - 'BackendTestCase', + "BackendTestCase", ] @@ -17,8 +17,11 @@ class BackendTestCase(BaseIntegrationTestCase): def test_async_result(self): self.assertEqual(add_numbers.delay(1, 2).get(), 3) - self.assertEqual(combine_lists.delay([1, 2, ], [3, 4, ]).get(), [1, 2, 3, 4]) - self.assertEqual(combine_dicts.delay({'a': 'abc', }, {'b': 'efg', }).get(), {'a': 'abc', 'b': 'efg', }) + self.assertEqual(combine_lists.delay([1, 2], [3, 4]).get(), [1, 2, 3, 4]) + self.assertEqual( + combine_dicts.delay({"a": "abc"}, {"b": "efg"}).get(), + {"a": "abc", "b": "efg"}, + ) def test_async_result_status(self): async_result = add_numbers_slow.delay(1, 2) @@ -32,25 +35,29 @@ def test_async_result_status(self): self.assertEqual(async_result.successful(), True) def test_async_result_group(self): - async_job = celery.group([ - add_numbers.s(1, 2), - combine_lists.s([1, 2, ], [3, 4, ]), - combine_dicts.s({'a': 'abc', }, {'b': 'efg', }), - ]) + async_job = celery.group( 
+ [ + add_numbers.s(1, 2), + combine_lists.s([1, 2], [3, 4]), + combine_dicts.s({"a": "abc"}, {"b": "efg"}), + ], + ) async_result = async_job.apply_async() result = async_result.get() self.assertEqual(result[0], 3) self.assertEqual(result[1], [1, 2, 3, 4]) - self.assertEqual(result[2], {'a': 'abc', 'b': 'efg', }) + self.assertEqual(result[2], {"a": "abc", "b": "efg"}) def test_async_result_group_status(self): - async_job = celery.group([ - add_numbers.s(1, 2), - add_numbers_slow.s(1, 2), - combine_lists.s([1, 2, ], [3, 4, ]), - combine_dicts.s({'a': 'abc', }, {'b': 'efg', }), - ]) + async_job = celery.group( + [ + add_numbers.s(1, 2), + add_numbers_slow.s(1, 2), + combine_lists.s([1, 2], [3, 4]), + combine_dicts.s({"a": "abc"}, {"b": "efg"}), + ], + ) async_result = async_job.apply_async() self.assertEqual(async_result.ready(), False) @@ -62,20 +69,24 @@ def test_async_result_group_status(self): self.assertEqual(async_result.successful(), True) def test_async_result_chord(self): - async_chord = celery.chord([ - add_numbers.s(1, 2), - add_numbers_slow.s(3, 4), - ]) + async_chord = celery.chord( + [ + add_numbers.s(1, 2), + add_numbers_slow.s(3, 4), + ], + ) async_result = async_chord(sum_numbers.s()) result = async_result.get() self.assertEqual(result, 10) def test_async_result_chord_status(self): - async_chord = celery.chord([ - add_numbers.s(1, 2), - add_numbers_slow.s(3, 4), - ]) + async_chord = celery.chord( + [ + add_numbers.s(1, 2), + add_numbers_slow.s(3, 4), + ], + ) async_result = async_chord(sum_numbers.s()) self.assertEqual(async_result.ready(), False) diff --git a/tests/test_project/test_project/urls.py b/tests/test_project/test_project/urls.py index 9004533..b25a873 100644 --- a/tests/test_project/test_project/urls.py +++ b/tests/test_project/test_project/urls.py @@ -1,5 +1,5 @@ __all__ = [ - 'urlpatterns', + "urlpatterns", ] urlpatterns = [] diff --git a/tests/test_project/test_project/wsgi.py b/tests/test_project/test_project/wsgi.py index 1e6481c..535131b 100644 --- a/tests/test_project/test_project/wsgi.py +++ b/tests/test_project/test_project/wsgi.py @@ -2,6 +2,6 @@ from django.core.wsgi import get_wsgi_application -os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'test_project.settings') +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "test_project.settings") application = get_wsgi_application()