Celery task status showing PENDING
I am using Celery and the task status I get is always PENDING; it is probably an implementation problem. Please check my code.
I am trying to save task information such as the id, name and status in my MongoDB database. For that I use a function which my task calls to store the data in MongoDB.
Is my task stuck in PENDING because my function call happens before the task's return statement?
settings.py
from celery.schedules import crontab  # needed for the crontab() entry in CELERY_BEAT_SCHEDULE

CELERY_BROKER_URL = 'mongodb://localhost:27017/jobs'
CELERY_RESULT_BACKEND = "mongodb"
CELERY_IGNORE_RESULT = False
CELERY_TRACK_STARTED = True
CELERY_MONGODB_BACKEND_SETTINGS = {
    "host": "127.0.0.1",
    "port": 27017,
    "database": "jobs",
    "taskmeta_collection": "my_taskmeta_collection",
}
CELERY_BEAT_SCHEDULE = {
    'add-every-minute-contrab': {
        'task': 'username_length_periodically',
        'schedule': crontab(minute='*/1'),
        # 'args': (2, 3),
    },
}
CELERY_ACCEPT_CONTENT = ['application/json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_TIMEZONE = TIME_ZONE
celery.py
from __future__ import absolute_import, unicode_literals
import os, logging
from celery import Celery
from celery.schedules import crontab
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'RestUserAPI.settings')
app = Celery('UserAPI')
# Using a string here means the worker doesn't have to serialize
# the configuration object to child processes.
# - namespace='CELERY' means all celery-related configuration keys
# should have a `CELERY_` prefix.
app.config_from_object('django.conf:settings', namespace='CELERY')
# Load task modules from all registered Django app configs.
app.autodiscover_tasks()
@app.task(bind=True)
def debug_task(self):
    print('Request: {0!r}'.format(self.request))
tasks.py
from __future__ import absolute_import, unicode_literals
from celery import task, current_task, result
from django.conf import settings
import datetime
from .models import TaskMetaData
@task(name='username_length_periodically', bind=True)
def get_username_length_periodically(self):
    last_run = datetime.datetime.now()
    dict = {'name':self.name,
            'id':self.request.id,
            'status':self.AsyncResult(self.request.id).state,
            'last_run': last_run,
            }
    store_metadata(dict)
    return dict

def store_metadata(dict):
    metadata = TaskMetaData()
    metadata.task_id = dict['id']
    metadata.task_name = dict['name']
    metadata.task_status = dict['status']
    metadata.task_last_run = dict['last_run']
    metadata.save()
I think this is just a plain old logic error. If you look at your call that checks the status of the task using AsyncResult:
'status':self.AsyncResult(self.request.id).state,
you'll notice that you are checking the status of the task while the task itself is still running. That means the check will always show the state PENDING (or STARTED, if you have task_track_started / CELERY_TRACK_STARTED enabled), because you are always reading the state from inside the task and never come back afterwards to update the stored status once the task has actually finished.
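A minimal caller-side sketch of where such a check does make sense (the import path and the dispatch below are assumptions for illustration, not part of the original post):

# Hypothetical caller: the state is only meaningful when read outside the task,
# after it has been dispatched to the worker.
from myapp.tasks import get_username_length_periodically  # assumed module path

async_result = get_username_length_periodically.delay()
print(async_result.state)            # PENDING, then STARTED (with CELERY_TRACK_STARTED), then SUCCESS
print(async_result.get(timeout=30))  # blocks until the result is written to the MongoDB backend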
To keep the status up to date, kick off a separate monitoring task that periodically checks the state of the original task and records it in the database until the task has completed or failed. For example:
from celery import states
from celery.result import AsyncResult

# `app` is the Celery application instance defined in your project's celery.py
@app.task(name='monitor')
def monitor(task_id):
    result = AsyncResult(task_id)
    if result.state in states.READY_STATES:
        # the task reached a final state (SUCCESS, FAILURE or REVOKED):
        # update the metadata table for this task_id
        ...
    else:
        # not finished yet: check again in 60 seconds
        monitor.apply_async(kwargs={'task_id': task_id}, countdown=60)
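A possible way to wire the two together (names taken from the snippets above; treat it as a sketch, not the only option): dispatch the real task first, then hand its id to the monitor so the metadata row eventually records the final state.

# Hypothetical caller combining the task and its monitor
async_result = get_username_length_periodically.apply_async()
monitor.apply_async(kwargs={'task_id': async_result.id}, countdown=60)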