#!/usr/local/bin/python2.7
# -*- coding: utf-8 -*-
'''
Created on Nov 5, 2014
@author: songs
'''
from scrapy.command import ScrapyCommand
from scrapy.utils.project import get_project_settings
from scrapy.crawler import Crawler

class Command(ScrapyCommand):

    requires_project = True

    def syntax(self):
        return '[options]'

    def short_desc(self):
        return 'Runs all of the spiders'

    def add_options(self, parser):
        ScrapyCommand.add_options(self, parser)

    def process_options(self, args, opts):
        ScrapyCommand.process_options(self, args, opts)

    def run(self, args, opts):
        settings = get_project_settings()
        # A throwaway crawler, used only to enumerate the project's spiders.
        base_crawler = self.crawler_process.create_crawler()
        for spider_name in base_crawler.spiders.list():
            # Give each spider its own crawler so they all run in one process.
            crawler = Crawler(settings)
            crawler.configure()
            spider = crawler.spiders.create(spider_name)
            # spider = crawler.spiders.create(spider_name, **opts.spargs)
            crawler.crawl(spider)
            crawler.start()
        # Start the Twisted reactor; this blocks until every spider finishes.
        self.crawler_process.start()
Save the command in a commands package inside the project:

/projectname/projectname/commands/:
    __init__.py
    crawlall.py

Note: the __init__.py file in the directory above must exist, otherwise Python will not recognize commands/ as a package.
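For Scrapy to pick up the new command, the project's settings.py also has to point at the commands package via the COMMANDS_MODULE setting. A minimal sketch, assuming the project package is named projectname as in the layout above:

# settings.py
# Tell Scrapy where to look for project-specific commands.
COMMANDS_MODULE = 'projectname.commands'

With that in place, all spiders in the project can be run with a single invocation:

scrapy crawlall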