我已经使用cookiecutter
来设置django项目。项目的相关部分应为:
文件config/settings/base.py
:
INSTALLED_APPS += ['foo.taskapp.celery.CeleryAppConfig']
if USE_TZ:
CELERY_TIMEZONE = TIME_ZONE
CELERY_BROKER_URL = env('CELERY_BROKER_URL', default='redis://redis:6379/0')
CELERY_RESULT_BACKEND = CELERY_BROKER_URL
CELERY_ACCEPT_CONTENT = ['json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERYD_TASK_TIME_LIMIT = 5 * 60
CELERYD_TASK_SOFT_TIME_LIMIT = 60
### EDIT: I had forgotten these: ###
CELERY_ALWAYS_EAGER=True # thats the problem
CELERY_TASK_EAGER_PROPAGATES = True
文件foo/taskapp/celery.py
(应与cookiecutter相同):
import os
from celery import Celery
from django.apps import apps, AppConfig
from django.conf import settings
if not settings.configured:
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.settings.local') # pragma: no cover
app = Celery('foo')
class CeleryAppConfig(AppConfig):
name = 'foo.taskapp'
verbose_name = 'Celery Config'
def ready(self):
# Using a string here means the worker will not have to
# pickle the object when using Windows.
# - namespace='CELERY' means all celery-related configuration keys
# should have a `CELERY_` prefix.
app.config_from_object('django.conf:settings', namespace='CELERY')
installed_apps = [app_config.name for app_config in apps.get_app_configs()]
app.autodiscover_tasks(lambda: installed_apps, force=True)
if hasattr(settings, 'RAVEN_CONFIG'):
# Celery signal registration
# Since raven is required in production only,
# imports might (most surely will) be wiped out
# during PyCharm code clean up started
# in other environments.
# @formatter:off
from raven import Client as RavenClient
from raven.contrib.celery import register_signal as raven_register_signal
from raven.contrib.celery import register_logger_signal as raven_register_logger_signal
# @formatter:on
raven_client = RavenClient(dsn=settings.RAVEN_CONFIG['dsn'])
raven_register_logger_signal(raven_client)
raven_register_signal(raven_client)
@app.task(bind=True)
def debug_task(self):
print(f'Request: {self.request!r}') # pragma: no cover
文件foo/foo/tasks.py
:
class TaskHandler(Task):
name = "..."
def run(self, *args, **kwargs):
# this will block the main django thread
time.sleep(10)
def on_success(self, retval, task_id, args, kwargs):
# enters here succesfully
...
def on_failure(self, exc, task_id, args, kwargs, einfo):
# does NOT enter here if exception is raised in run()
...
tasktype = app.register_task(TaskHandler())
# app.tasks.register(TaskHandler()) # also tried this
文件foo/foo/views.py
:
class FooCreateView(FormValidMessageMixin, CreateView):
...
def form_valid(self, form):
form.instance.user = self.request.user
with transaction.atomic():
# Save the self.object and render the success_url response page
response = super().form_valid(form)
...
kwargs = { ... } # json serializable
transaction.on_commit(lambda: tasktype.delay(**kwargs))
# transaction.on_commit(lambda: TaskHandler().delay(**kwargs)) # also tried
return response
只有在celery任务完成后,http响应才会返回到浏览器。
$ celery worker -A foo.taskapp -l info
。我只能看到Django根记录器的消息(也没有任何信息)。
有什么想法吗?