我几乎遵循标准的项目组织结构。但是celery无法找到我应用程序中的任务。
它可以找到 celery.py 中的“ debug_task”,但无法提取应用程序 tasks.py 文件中的任务。
我的目录结构:
D:.
│ manage.py
│ requirements.txt
│
├───fooproject
│ │ celery.py
│ │ urls.py
│ │ wsgi.py
│ │ __init__.py
│ │
│ ├───settings
│ │ │ base.py
│ │ │ remote.py
│ │ │ __init__.py
│
│
├───bravo <- app directory
│ │ admin.py
│ │ apps.py
│ │ models.py
│ │ tasks.py <- tasks file from where it fails to pick up tasks
│ │ tests.py
│ │ __init__.py
fooproject / celery.py
from __future__ import absolute_import, unicode_literals
import os
from celery import Celery
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'fooproject.settings')
app = Celery('fooproject')
# Using a string here means the worker doesn't have to serialize
# the configuration object to child processes.
# - namespace='CELERY' means all celery-related configuration keys
# should have a `CELERY_` prefix.
app.config_from_object('django.conf:settings', namespace='CELERY', force=True)
# Load task modules from all registered Django app configs.
app.autodiscover_tasks()
@app.task(bind=True)
def debug_task(self):
print('Request: {0!r}'.format(self.request))
fooproject / init .py
from __future__ import absolute_import, unicode_literals
# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
from .celery import app as celery_app
__all__ = ('celery_app',)
fooproject / settings / init .py(我正在本地测试,因此设置将来自fooproject / settings / base.py)
from .base import *
environment = os.environ.get('DEPLOYMENT_STATE')
environment = environment.upper() if environment else None
if environment == 'REMOTE':
from .remote import *
fooproject / settings / base.py
import os
from kombu import Queue
DEBUG = True
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Application definition
INSTALLED_APPS = [
# Installed apps
'django_extensions',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
# Third party apps
'rest_framework',
# Local apps
'bravo', # MY APP
]
# Other configs left for brevity
# Celery configs
CELERY_BROKER_URL = 'amqp://guest:guest@localhost:5672//'
CELERY_RESULT_BACKEND = 'amqp://guest:guest@localhost:5672//'
CELERY_TASK_DEFAULT_QUEUE = 'default'
CELERY_QUEUES = {
Queue(LATE_RETRY_QUEUE_NAME, routing_key=LATE_RETRY_QUEUE_NAME),
Queue(FAIL_AUDIT_QUEUE_NAME, routing_key=FAIL_AUDIT_QUEUE_NAME)
}
bravo / tasks.py(这里的任务无法被芹菜捡起)
import logging
from datetime import datetime, timedelta
from django.conf import settings
from celery import app, shared_task
from celery.task import Task
logger = logging.getLogger(__name__)
def get_queue_name(request):
# By default celery uses the same name for queues and exchanges
original_queue = request.delivery_info['exchange']
for queue in app.amqp.queues.itervalues():
if queue.exchange.name == request.delivery_info['exchange'] \
and queue.routing_key == request.delivery_info['routing_key']:
original_queue = queue.name
break
return original_queue
class BaseTask(Task):
abstract = True
def after_return(self, status, retval, task_id, args, kwargs, einfo):
q_name = get_queue_name(self.request)
if self.max_retries == self.request.retries:
if q_name == settings.LATE_RETRY_QUENE_NAME:
logger.info('Task with args: %s failed in late retry' % args)
return
else:
logger.info('Sending task with args: %s to late retry and audit queue' % args)
self.apply_async(args=args, kwargs=kwargs,
queue=settings.LATE_RETRY_QUENE_NAME,
eta=datetime.now() + timedelta(hours=1))
self.apply_async(args=args, kwargs=kwargs, queue=settings.FAIL_AUDIT_QUEUE_NAME)
@shared_task(base=BaseTask,
bind=True,
autoretry_for=(Exception,),
max_retries=3,
retry_backoff=True,
retry_jitter=True)
def task_test(self, i):
if i >= 1:
raise Exception('Exception: %d' % i)