Without further ado, here is the code.
1. The spider file
import scrapy

from car_home.items import che168Item


class Che168Spider(scrapy.Spider):
    name = 'che168'
    allowed_domains = ['che168.com']
    start_urls = ['https://www.che168.com/beijing/list/']

    def parse(self, response):
        # Read the last entry of the pagination bar to find out how many list pages
        # there are, then schedule a request for every page.
        max_page = response.xpath('//div[@id="listpagination"]/a/text()')[-1].extract()
        base_url = 'https://www.che168.com/beijing/a0_0msdgscncgpi1ltocsp{}exx0/'
        for i in range(1, int(max_page) + 1):
            url = base_url.format(i)
            yield scrapy.Request(url, callback=self.parse_list)

    def parse_list(self, response):
        # Every field is extracted as a parallel list, one entry per car on the page.
        titles = response.xpath('//div[@class="title"]/a/text()').extract()
        travel = response.xpath('//div[@class="details"]/ul/li[1]/span[1]/text()').extract()
        year = response.xpath('//div[@class="details"]/ul/li[2]/span[1]/text()').extract()
        type = response.xpath('//div[@class="details"]/ul/li[3]/span[1]/text()').extract()
        address = response.xpath('//div[@class="details"]/ul/li[4]/span[1]/text()').extract()
        standard = response.xpath('//div[@class="details"]/ul/li[5]/span[1]/text()').extract()
        nums = response.xpath('//div[@class="details"]/ul/li[6]/span[1]/text()').extract()
        for i in range(len(titles)):
            # Build a fresh item for each car so earlier yielded items are not overwritten.
            item = che168Item()
            item['titles'] = titles[i]
            item['travel'] = travel[i]
            item['year'] = year[i]
            item['type'] = type[i]
            item['address'] = address[i]
            item['standard'] = standard[i]
            item['nums'] = nums[i]
            yield item
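Once everything below is in place, the spider is started from the project root in the usual Scrapy way (the spider name comes from the name attribute above; the project name car_home is taken from the imports):

    scrapy crawl che168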
2. The item definition (items.py)
import scrapy


class che168Item(scrapy.Item):
    titles = scrapy.Field()
    travel = scrapy.Field()
    year = scrapy.Field()
    type = scrapy.Field()
    address = scrapy.Field()
    standard = scrapy.Field()
    nums = scrapy.Field()

    def get_insert_sql(self):
        # Return the parameterized INSERT statement and the value tuple for this item.
        sql = 'insert into car_home(titles,travel,year,type,address,standard,nums) values(%s, %s, %s, %s, %s, %s, %s)'
        data = (self['titles'], self['travel'], self['year'], self['type'],
                self['address'], self['standard'], self['nums'])
        return (sql, data)
3. The pipeline
from car_home.sql_unit import SqlUnit


class CarHomePipeline(object):
    def process_item(self, item, spider):
        return item


class che168Pipeline(object):
    def process_item(self, item, spider):
        # Note: this creates a new SqlUnit (and a new database connection) per item.
        s = SqlUnit()
        (sql, data) = item.get_insert_sql()
        s.execute_sql(sql, data)
        return item
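Because the pipeline above opens a fresh connection for every item, a common variation is to open it once per crawl using Scrapy's standard open_spider/close_spider hooks. A minimal sketch of that alternative, reusing the same SqlUnit wrapper:

    from car_home.sql_unit import SqlUnit


    class che168Pipeline(object):
        def open_spider(self, spider):
            # One connection for the whole crawl instead of one per item.
            self.sql = SqlUnit()

        def process_item(self, item, spider):
            sql, data = item.get_insert_sql()
            self.sql.execute_sql(sql, data)
            return item

        def close_spider(self, spider):
            # Dropping the reference lets SqlUnit.__del__ close cursor and connection.
            self.sql = None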
4. The database storage wrapper
import pymysql


class SqlUnit(object):
    def __init__(self):
        # pymysql 1.0+ expects keyword arguments for connect().
        self.db = pymysql.connect(host='127.0.0.1', user='root',
                                  password='123456', database='home')
        self.cursor = self.db.cursor()

    def execute_sql(self, sql, data=None):
        self.cursor.execute(sql, data)
        self.db.commit()

    def __del__(self):
        self.cursor.close()
        self.db.close()
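The INSERT statement in get_insert_sql assumes a car_home table inside the home database. The post never shows the schema, so the column types below are an assumption (everything stored as plain text); the table can be created once, for example from a Python shell, with the same wrapper:

    from car_home.sql_unit import SqlUnit

    # Hypothetical schema matching the seven columns used by get_insert_sql.
    create_sql = """
    CREATE TABLE IF NOT EXISTS car_home (
        id INT AUTO_INCREMENT PRIMARY KEY,
        titles VARCHAR(255),
        travel VARCHAR(64),
        year VARCHAR(64),
        type VARCHAR(64),
        address VARCHAR(255),
        standard VARCHAR(64),
        nums VARCHAR(64)
    )
    """
    SqlUnit().execute_sql(create_sql)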
5. Add che168Pipeline to ITEM_PIPELINES in settings.py
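Assuming the default Scrapy project layout (the pipeline classes live in car_home/pipelines.py), the entry in settings.py looks like this; the number is the usual pipeline priority value:

    ITEM_PIPELINES = {
        'car_home.pipelines.che168Pipeline': 300,
    }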