Using Celery in Django

This article walks through integrating Celery into a Django project for asynchronous task processing: configuration, task definition, fetching results, and setting up periodic tasks. It demonstrates running an add and a mul task with Celery, reading the results from Redis, and using django-celery-beat and django-celery-results for periodic task management and result storage.


Modules

Python 3.6.6
PyMySQL==0.8.1
Django==2.1.3
redis==3.0.1
celery==4.1.1
django-celery-beat==1.1.1
django-celery-results==1.0.1
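
These can be installed with pip, pinned to the versions above (adjust to your environment); a Redis server also has to be running locally for the broker URL used below:

	pip install PyMySQL==0.8.1 Django==2.1.3 redis==3.0.1 celery==4.1.1 django-celery-beat==1.1.1 django-celery-results==1.0.1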

Directory structure

emgc
├── cron
│   ├── __init__.py
│   ├── apps.py
│   ├── migrations
│   │   └── __init__.py
│   ├── models.py
│   ├── tasks.py
│   └── views.py
├── front
│   ├── __init__.py
│   ├── apps.py
│   ├── migrations
│   │   └── __init__.py
│   ├── models.py
│   ├── tasks.py
│   └── views.py
├── manage.py
├── emgc
│   ├── __init__.py
│   ├── celery.py
│   ├── settings.py
│   ├── urls.py
│   └── wsgi.py
└── templates
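
If you are rebuilding this layout from scratch, the project and the two apps can be generated the usual way (a sketch; the tasks.py files, templates/ and the celery.py shown next are added by hand):

	django-admin startproject emgc
	cd emgc
	python3 manage.py startapp front
	python3 manage.py startapp cron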

Configuration and usage

emgc/emgc/celery.py:

	from __future__ import absolute_import, unicode_literals
	import os
	from celery import Celery
	from emgc import settings
	
	# set the default Django settings module for the 'celery' program.

	os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'emgc.settings')
	app = Celery('emgc')
	
	# Using a string here means the worker doesn't have to serialize
	# the configuration object to child processes.
	# - namespace='CELERY' means all celery-related configuration keys
	#   should have a `CELERY_` prefix.
	
	app.config_from_object('django.conf:settings', namespace='CELERY')
	
	# Load task modules from all registered Django app configs.
	
	app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)	
	
		
emgc/emgc/__init__.py:

	from __future__ import absolute_import, unicode_literals
	
	# This will make sure the app is always imported when
	# Django starts so that shared_task will use this app.
	
	from .celery import app as celery_app
	__all__ = ['celery_app']


emgc/emgc/settings.py:

	import pymysql
	pymysql.install_as_MySQLdb()
	INSTALLED_APPS = [
		...
	    'django_celery_results',
	    'django_celery_beat',
	    'front',
	    'cron',  # both task-bearing apps must be registered so their tasks are autodiscovered
	]
	LANGUAGE_CODE = 'zh-Hans'
	TIME_ZONE = 'Asia/Shanghai'
	USE_I18N = True
	USE_L10N = True
	USE_TZ = True  # with USE_TZ=True Django stores datetimes in UTC internally; TIME_ZONE is still used when converting times for display
	# celery with redis
	# celery 4.1.0 has a time zone bug, so keep Celery on UTC
	CELERY_RESULT_BACKEND = 'redis://localhost:6379'  # result backend; Redis is used here
	# CELERY_RESULT_BACKEND = 'django-db'  # use the Django ORM as the result store instead
	CELERY_BROKER_URL = 'redis://localhost:6379'
	CELERY_TIMEZONE = 'UTC'
	CELERY_ENABLE_UTC = True
	# store periodic task definitions in the database (django-celery-beat)
	CELERY_BEAT_SCHEDULER = 'django_celery_beat.schedulers:DatabaseScheduler'
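
Broker messages and results can also be kept apart by pointing them at different Redis databases; this is optional and not part of the original configuration:

	CELERY_BROKER_URL = 'redis://localhost:6379/0'
	CELERY_RESULT_BACKEND = 'redis://localhost:6379/1'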


Start a worker from the project root (the directory containing manage.py):

	celery -A emgc worker --pool=solo -l info (on Windows, omitting --pool=solo raises ValueError: not enough values to unpack (expected 3, got 0))

Defining tasks

emgc/front/tasks.py:

	from __future__ import absolute_import, unicode_literals
	from celery import shared_task
	
	@shared_task
	def add(x, y):
	    return x + y
	    
	@shared_task
	def mul(x, y):
	    return x * y
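
A quick way to check these tasks without a view is the Django shell (a sketch; the worker started above must be running and Redis must be reachable):

	# python3 manage.py shell
	from front.tasks import add, mul

	r = add.delay(2, 3)        # returns an AsyncResult immediately
	print(r.task_id)
	print(r.get(timeout=10))   # waits for the worker, prints 5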

Triggering tasks

emgc/front/views.py:

	from django.http import JsonResponse
	from front import tasks

	# Create your views here.
	def index(request):
	    x = int(request.GET.get("x"))
	    y = int(request.GET.get("y"))
	    res1 = tasks.add.delay(x, y)
	    res2 = tasks.mul.delay(x, y)
	    print("add:", x, y, res1.task_id)
	    print("mul:", x, y, res2.task_id)
	    return JsonResponse("success", safe=False)
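
The view still needs a URL route. A minimal emgc/emgc/urls.py might look like this (the index/ path name is an assumption, not from the original project):

	from django.urls import path
	from front import views as front_views

	urlpatterns = [
	    path('index/', front_views.index),  # e.g. GET /index/?x=2&y=3
	]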

Fetching the result from Redis

	127.0.0.1:6379> get celery-task-meta-517829d8-ebe5-4be2-95d9-d750df717cc0 (517829d8-ebe5-4be2-95d9-d750df717cc0 is the task_id printed by the view)
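
The same result can also be read from Python with AsyncResult instead of redis-cli (a sketch assuming the Redis result backend configured above):

	from celery.result import AsyncResult
	from emgc.celery import app

	# the task_id is whatever the view printed, e.g. res1.task_id
	res = AsyncResult('517829d8-ebe5-4be2-95d9-d750df717cc0', app=app)
	print(res.status)   # PENDING / SUCCESS / FAILURE ...
	print(res.result)   # the task's return value once it has finished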

Using the Django ORM as the result store:

Set CELERY_RESULT_BACKEND = 'django-db' in emgc/emgc/settings.py, then run the migration to create the
django_celery_results_taskresult table:

	python3 manage.py migrate django_celery_results

The model (django_celery_results.models.TaskResult), shown for reference:
	class TaskResult(models.Model):
	    """Task result/status."""

	    task_id = models.CharField(_('task id'), max_length=255, unique=True)
	    task_name = models.CharField(_('task name'), null=True, max_length=255)
	    task_args = models.TextField(_('task arguments'), null=True)
	    task_kwargs = models.TextField(_('task kwargs'), null=True)
	    status = models.CharField(_('state'), max_length=50,
	                              default=states.PENDING,
	                              choices=TASK_STATE_CHOICES)
	    content_type = models.CharField(_('content type'), max_length=128)
	    content_encoding = models.CharField(_('content encoding'), max_length=64)
	    result = models.TextField(null=True, default=None, editable=False)
	    date_done = models.DateTimeField(_('done at'), auto_now=True)
	    traceback = models.TextField(_('traceback'), blank=True, null=True)
	    hidden = models.BooleanField(editable=False, default=False, db_index=True)
	    meta = models.TextField(null=True, default=None, editable=False)

	    objects = managers.TaskResultManager()

	    class Meta:
	        """Table information."""

	        ordering = ['-date_done']

	        verbose_name = _('task result')
	        verbose_name_plural = _('task results')

	    def as_dict(self):
	        return {
	            'task_id': self.task_id,
	            'task_name': self.task_name,
	            'task_args': self.task_args,
	            'task_kwargs': self.task_kwargs,
	            'status': self.status,
	            'result': self.result,
	            'date_done': self.date_done,
	            'traceback': self.traceback,
	            'meta': self.meta,
	        }

	    def __str__(self):
	        return '<Task: {0.task_id} ({0.status})>'.format(self)
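
With the django-db backend, results can then be queried like any other model, e.g. from the Django shell (a sketch using the task_id printed by the view):

	from django_celery_results.models import TaskResult

	tr = TaskResult.objects.get(task_id='517829d8-ebe5-4be2-95d9-d750df717cc0')
	print(tr.status, tr.result)
	print(tr.as_dict())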

Periodic task configuration

Database migration:
	python3 manage.py migrate django_celery_beat
Start the worker and beat as separate processes:
	celery -A emgc beat -l info
	celery -A emgc worker --pool=solo -l info (on Windows, omitting --pool=solo raises ValueError: not enough values to unpack (expected 3, got 0))
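
For local testing, the beat scheduler can also be embedded in the worker with -B so only one process is needed (a convenience sketch, not recommended for production):

	celery -A emgc worker -B --pool=solo -l info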

Defining the periodic tasks

emgc/cron/tasks.py:
	
	from celery import shared_task

	@shared_task
	def add(**kwargs):
	    x = kwargs.get("x")
	    y = kwargs.get("y")
	    print("addition result:", x + y)
	    return x + y

	@shared_task
	def mul(**kwargs):
	    x = kwargs.get("x")
	    y = kwargs.get("y")
	    print("multiplication result:", x * y)
	    return x * y

Creating crontab and interval schedules

emgc/cron/views.py:
	import json

	from django.http import JsonResponse
	from django_celery_beat.models import CrontabSchedule, IntervalSchedule, PeriodicTask

	# crontab-style (scheduled) task
	def crontab(request):
	    minute = request.GET.get('minute', request.POST.get('minute'))
	    hour = request.GET.get('hour', request.POST.get('hour'))
	    day_of_week = request.GET.get('day_of_week', request.POST.get('day_of_week'))
	    day_of_month = request.GET.get('day_of_month', request.POST.get('day_of_month'))
	    month_of_year = request.GET.get('month_of_year', request.POST.get('month_of_year'))
	    x = int(request.GET.get('x', request.POST.get('x')))
	    y = int(request.GET.get('y', request.POST.get('y')))
	    schedule, created = CrontabSchedule.objects.get_or_create(
	        minute=minute, hour=hour, day_of_week=day_of_week,
	        day_of_month=day_of_month, month_of_year=month_of_year)
	    PeriodicTask.objects.get_or_create(
	        name="addition",
	        task="cron.tasks.add",
	        crontab=schedule,
	        kwargs=json.dumps({"x": x, "y": y}),
	        description="crontab task test")
	    return JsonResponse(created, safe=False)

	# interval-style (periodic) task
	def interval(request):
	    seconds = int(request.GET.get('seconds', request.POST.get('seconds')))
	    x = int(request.GET.get('x', request.POST.get('x')))
	    y = int(request.GET.get('y', request.POST.get('y')))
	    # other period choices: IntervalSchedule.DAYS, HOURS, MINUTES, MICROSECONDS
	    schedule, created = IntervalSchedule.objects.get_or_create(
	        every=seconds, period=IntervalSchedule.SECONDS)
	    PeriodicTask.objects.get_or_create(
	        name="multiplication",
	        task="cron.tasks.mul",
	        interval=schedule,
	        kwargs=json.dumps({'x': x, 'y': y}),
	        description="interval task test")
	    return JsonResponse(created, safe=False)
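
The same PeriodicTask rows can also be created directly from the Django shell instead of through a view (a sketch; the task name here is arbitrary). django-celery-beat's DatabaseScheduler picks up new or changed rows without restarting beat:

	import json
	from django_celery_beat.models import IntervalSchedule, PeriodicTask

	schedule, _ = IntervalSchedule.objects.get_or_create(every=10, period=IntervalSchedule.SECONDS)
	PeriodicTask.objects.get_or_create(
	    name="mul-every-10s",
	    task="cron.tasks.mul",
	    interval=schedule,
	    kwargs=json.dumps({"x": 2, "y": 3}),
	)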