Example 1: Crawling the book titles and links from the first page of Douban's "programming" tag with Scrapy and storing them in a database
1. The fields to crawl are defined in items.py; here we want each book's title and link.
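items.py itself is not shown in the original. A minimal sketch, consistent with the spider's import (from second.items import bbs) and the two fields it fills:

from scrapy.item import Item, Field

class bbs(Item):
    # Fields the spider extracts: the book title and its detail-page link
    title = Field()
    link = Field()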
2. The spider file under spiders/:
from scrapy.spider import BaseSpider
from scrapy.selector import HtmlXPathSelector
from second.items import bbs

class bbsSpider(BaseSpider):
    name = "boat"
    # allowed_domains takes domain names, not full URLs
    allowed_domains = ["book.douban.com"]
    start_urls = ["http://book.douban.com/tag/编程?type=S"]

    def parse(self, response):
        hxs = HtmlXPathSelector(response)
        items = []
        item = bbs()
        # Each book entry is an <li>; the title and link sit on the <a> under div[2]/h2
        item['title'] = hxs.select('//ul/li[position()>0]/div[2]/h2/a/@title').extract()
        item['link'] = hxs.select('//ul/li[position()>0]/div[2]/h2/a/@href').extract()
        items.append(item)
        return items
3. The pipelines file; for details on how Scrapy saves to a database, see the Twisted adbapi documentation.
# -*- coding: utf-8 -*-
# Define your item pipelines here
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/topics/item-pipeline.html
from twisted.enterprise import adbapi
import MySQLdb
import MySQLdb.cursors

class MySQLStorePipeline(object):
    def __init__(self):
        # Connect to the database through a Twisted connection pool
        self.dbpool = adbapi.ConnectionPool('MySQLdb',
                                            db='test',
                                            user='root',
                                            passwd='root',
                                            cursorclass=MySQLdb.cursors.DictCursor,
                                            charset='utf8',
                                            use_unicode=False)

    # Called by Scrapy for every item
    def process_item(self, item, spider):
        query = self.dbpool.runInteraction(self._conditional_insert, item)
        return item

    # Write each row into the database
    def _conditional_insert(self, tx, item):
        if item.get('title'):
            for i in range(len(item['title'])):
                tx.execute('insert into book values (%s, %s)',
                           (item['title'][i], item['link'][i]))
4. Register the pipeline in settings.py (a sketch is shown below):
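The original omits the actual setting. A minimal sketch, assuming the project is named second (matching the spider's import); older Scrapy versions expect a list, newer ones a dict of pipeline path to priority:

# older Scrapy (list form)
ITEM_PIPELINES = ['second.pipelines.MySQLStorePipeline']

# newer Scrapy (dict form with priority)
ITEM_PIPELINES = {
    'second.pipelines.MySQLStorePipeline': 300,
}

With the pipeline registered, the crawl can be started with scrapy crawl boat (the name defined in the spider).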
5. The test database and the book table must be created in MySQL beforehand.
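The original does not give the table schema. A minimal sketch that matches the two-column insert in the pipeline; the column names and lengths are assumptions:

import MySQLdb

# Create the database and table that MySQLStorePipeline writes into
conn = MySQLdb.connect(user='root', passwd='root', charset='utf8')
cur = conn.cursor()
cur.execute("CREATE DATABASE IF NOT EXISTS test DEFAULT CHARACTER SET utf8")
cur.execute("CREATE TABLE IF NOT EXISTS test.book ("
            "title VARCHAR(255), "
            "link VARCHAR(255))")
conn.commit()
conn.close()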
6. The crawler's output is as follows (truncated):
{'link': [u'http://book.douban.com/subject/1885170/',
As the crawl output above shows, the result is a dict whose values are lists. So when writing to the database, item['title'] in the for loop is the list stored under that key, and len(item['title']) is the length of that list. Note: if the database write is not set up correctly, the table stays empty.
for i in range(len(item['title'])):
    tx.execute('insert into book values (%s, %s)',
               (item['title'][i], item['link'][i]))