在 Celery 的配置方法中有个参数叫 task_routes，是用来设置不同的任务消费不同的队列（也就是路由）。
格式如下：
{ 'task name': { 'queue': 'queue name' } }
直接上代码，简单明了。首先是配置文件 config/__init__.py：
import os
import sys
from pathlib import PathBASE_DIR Path(__file__).resolve().parent.parent
sys.path.append(str(BASE_DIR))class Config(object):配置文件基类 项目名称 PROJECT_NAME crawler_worker celery backend存放结果 CELERY_BACKEND_URL redis://127.0.0.1:6379/4 celery broker中间件 CELERY_BROKER_URL redis://127.0.0.1:6379/5 worker 名称 CRAWL_SEND_EMAIL_TASK crawl_service.crawl.send_email_task # 抓取发送邮件任务CRAWL_SEND_MSG_TASK crawl_service.crawl.send_msg_task # 抓取发送短信任务settings Config()celery应用程序模块配置相关 celery_base.celery_app.py
import os
import sys
import time
import celery
from pathlib import PathBASE_DIR Path(__file__).resolve().parent.parent
sys.path.append(str(BASE_DIR))from config import settings# 实例化celery对象
celery_app celery.Celery(settings.PROJECT_NAME,backendsettings.CELERY_BACKEND_URL,brokersettings.CELERY_BROKER_URL,include[tasks.crawl_send_email,tasks.crawl_send_msg,],
)# 任务路由
task_routes {settings.CRAWL_SEND_EMAIL_TASK: {queue: f{settings.CRAWL_SEND_EMAIL_TASK}_queue},settings.CRAWL_SEND_MSG_TASK: {queue: f{settings.CRAWL_SEND_MSG_TASK}_queue},
}
# 任务去重
celery_once {backend: celery_once.backends.Redis,settings: {url: settings.CELERY_BACKEND_URL, default_timeout: 60 * 60},
}
# 配置文件
celery_app.conf.update(task_serializerjson,result_serializerjson,accept_content[json],task_default_queuenormal,timezoneAsia/Shanghai,enable_utcFalse,task_routestask_routes,task_ignore_resultTrue,redis_max_connections100,result_expires3600,ONCEcelery_once,
)抓取基类 crawl_worker_base.py
from celery_once import QueueOnceclass CrawlBase(QueueOnce):抓取worker基类name Noneonce {graceful: True}ignore_result True发送邮件任务 crawl_send_email.py
import os
import sys
import time
import celery
from loguru import logger
from pathlib import PathBASE_DIR Path(__file__).resolve().parent.parent
sys.path.append(str(BASE_DIR))from config import settings
from celery_base.celery_app import celery_app
from tasks.crawl_worker_base import CrawlBase执行命令:
celery -A tasks.crawl_send_email worker -l info -Q crawl_service.crawl.send_email_task_queueclass SendEmailClass(CrawlBase):name settings.CRAWL_SEND_EMAIL_TASKdef __init__(self, *args, **kwargs):super(SendEmailClass, self).__init__(*args, **kwargs)def run(self, name):logger.info(class的方式, 向%s发送邮件... % name)time.sleep(5)logger.info(class的方式, 向%s发送邮件完成 % name)return f成功拿到{name}发送的邮件!send_email celery_app.register_task(SendEmailClass())发送短信 crawl_send_msg.py
import os
import sys
import time
import celery
from loguru import logger
from pathlib import PathBASE_DIR Path(__file__).resolve().parent.parent
sys.path.append(str(BASE_DIR))
from config import settings
from celery_base.celery_app import celery_app
from tasks.crawl_worker_base import CrawlBase执行命令:
celery -A tasks.crawl_send_msg worker -l info -Q crawl_service.crawl.send_msg_task_queueclass SendMsgClass(CrawlBase):name settings.CRAWL_SEND_MSG_TASKdef __init__(self, *args, **kwargs):super(SendMsgClass, self).__init__(*args, **kwargs)def run(self, name):logger.info(class的方式, 向%s发送短信... % name)time.sleep(5)logger.info(class的方式, 向%s发送短信完成 % name)return f成功拿到{name}发送的短信!send_msg celery_app.register_task(SendMsgClass())发送邮件任务-调度器 send_email_scheduler.py
import sys
from pathlib import PathBASE_DIR Path(__file__).resolve().parent.parent
sys.path.append(str(BASE_DIR))from config import settings
from celery_base.celery_app import celery_appif __name__ __main__:for i in range(100):result celery_app.send_task(namesettings.CRAWL_SEND_EMAIL_TASK, args(f张三嘿嘿{i},))print(result.id)发送短信任务-调度器 send_msg_scheduler.py
import os
import sys
import time
from pathlib import PathBASE_DIR Path(__file__).resolve().parent.parent
sys.path.append(str(BASE_DIR))from config import settings
from celery_base.celery_app import celery_appif __name__ __main__:for i in range(100, 500):result celery_app.send_task(namesettings.CRAWL_SEND_MSG_TASK, args(f李四哈哈哈{i},))print(result.id)准备工作已经做好紧接着分别执行命令
celery -A tasks.crawl_send_email worker -l info -Q crawl_service.crawl.send_email_task_queue
celery -A tasks.crawl_send_msg worker -l info -Q crawl_service.crawl.send_msg_task_queue
出现下面效果就代表 Celery 启动成功。最后只要发送任务即可，在 Redis 中就可以看到专门指定的两个队列了。看下运行过程中的日志，一个简单的 Celery 队列就实现了。