python.scrapy 之crawlall命令执行所有的spiders

#!/usr/local/bin/python2.7
# -*- coding: utf-8 -*-
# encoding=UTF-8
'''
Created on 2014年11月5日
@author: songs
'''
from scrapy.command import ScrapyCommand
from scrapy.utils.project import get_project_settings
from scrapy.crawler import Crawler
class Command(ScrapyCommand):
    requires_project = True
    def syntax(self):
        return '[options]'
    def short_desc(self):
        return 'Runs all of the spiders'

    def add_options(self, parser):
        ScrapyCommand.add_options(self, parser)

    def process_options(self, args, opts):
        ScrapyCommand.process_options(self, args, opts)

    def run(self, args, opts):  
        settings = get_project_settings()  
        crawler = self.crawler_process.create_crawler()
        for spider_name in crawler.spiders.list():  
            crawler = Crawler(settings)  
            crawler.configure()  
            spider = crawler.spiders.create(spider_name)  
            crawler.crawl(spider)  
            crawler.start()  
            
#             spider = crawler.spiders.create(spider_name, **opts.spargs)
#             crawler.crawl(spider)
    
        self.crawler_process.start()


/projectname/projectname/commands/:

__init__.py
crawlall.py

注:以上目录__init__.py必须存在

评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值