My First Little Web Spider

spider.py

import scrapy
from meinv.items import MeinvItem


class MyScrapy(scrapy.Spider):
    name = "my"
    allowed_domains = ["moko.cc"]
    start_urls = [
        "http://www.moko.cc/post/aaronsky/list.html"
    ]

    def parse(self, response):
        item = MeinvItem()
        # extract the image links (this site keeps them in the lazy-load attribute src2)
        item['image_urls'] = response.xpath('//img//@src2').extract()
        yield item
        # follow the pagination links and parse each new page with this same callback
        new_urls = response.xpath('//a[@class="coverBg wC"]//@href').extract()
        for url in new_urls:
            print(url)
            yield scrapy.Request("http://www.moko.cc" + url, callback=self.parse)
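Before running the full crawl it is worth checking both XPath expressions interactively with Scrapy's shell; the src2 attribute appears to be the site's lazy-load placeholder for the real image URL, so a plain //img//@src would come back empty:

scrapy shell "http://www.moko.cc/post/aaronsky/list.html"
>>> response.xpath('//img//@src2').extract()        # should print the image links
>>> response.xpath('//a[@class="coverBg wC"]//@href').extract()  # pagination links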

items.py

import scrapy


class MeinvItem(scrapy.Item):
    # define the fields for your item here like:
    # name = scrapy.Field()
    image_urls = scrapy.Field()


pipelines.py

import os
from urllib.request import urlopen

from meinv import settings


class MeinvPipeline(object):
    def process_item(self, item, spider):
        # one storage directory per spider
        dir_path = '%s/%s' % (settings.IMAGES_STORE, spider.name)
        print('dir_path: ' + dir_path)
        if not os.path.exists(dir_path):
            os.makedirs(dir_path)
        for image_url in item['image_urls']:
            # flatten the URL into a file name so every image gets a unique path
            file_name = "".join(image_url.split('/')) + ".jpg"
            file_path = '%s/%s' % (dir_path, file_name)
            if os.path.exists(file_path):  # skip images already downloaded
                continue
            with open(file_path, 'wb') as file_writer:
                conn = urlopen(image_url)  # download the image
                file_writer.write(conn.read())
        return item
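As an alternative to hand-rolling the download loop, Scrapy ships a built-in ImagesPipeline that handles file naming, deduplication and checksumming itself, and it expects exactly the image_urls field this item already defines. A minimal sketch of the switch (Pillow must be installed; the images output field is Scrapy's convention, not part of the original project):

# settings.py -- enable the built-in pipeline instead of MeinvPipeline
ITEM_PIPELINES = {
    'scrapy.pipelines.images.ImagesPipeline': 300,
}
IMAGES_STORE = '/Users/zlinsun/Desktop/store'

# items.py -- ImagesPipeline reads image_urls and writes its results to images
class MeinvItem(scrapy.Item):
    image_urls = scrapy.Field()
    images = scrapy.Field()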

settings.py

BOT_NAME = 'meinv'

SPIDER_MODULES = ['meinv.spiders']
NEWSPIDER_MODULE = 'meinv.spiders'

IMAGES_STORE = '/Users/zlinsun/Desktop/store'  # root directory for downloaded images
DOWNLOAD_DELAY = 0.25  # pause 0.25 s between requests to be polite
ROBOTSTXT_OBEY = True
DEPTH_LIMIT = 10  # maximum crawl depth
ITEM_PIPELINES = {
   'meinv.pipelines.MeinvPipeline': 300,
}
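Note that with ROBOTSTXT_OBEY = True, Scrapy fetches the site's robots.txt first and filters out any request it disallows, logging "Forbidden by robots.txt". If the crawl ends immediately with that message, this setting is the reason; for a one-off local experiment you could flip it, though that is a judgment call and the site's rules still apply:

ROBOTSTXT_OBEY = False  # only for local experiments; respect the site's terms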

Run main.py

from scrapy import cmdline

cmdline.execute("scrapy crawl my".split())
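Equivalently, the crawl can be started straight from the command line in the project root; the cmdline.execute call above is just a wrapper around:

scrapy crawl my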