python - Limit how many elements Scrapy can collect
I am using Scrapy to collect data. The Scrapy program collects 100 elements in one session. I need to limit it to 50, or to a random number. How can I do that? Any solution is welcome. Thanks in advance.
# -*- coding: utf-8 -*-
import re
import scrapy

class DmozItem(scrapy.Item):
    # define the fields for your item here like:
    link = scrapy.Field()
    attr = scrapy.Field()
    title = scrapy.Field()
    tag = scrapy.Field()

class DmozSpider(scrapy.Spider):
    name = "dmoz"
    allowed_domains = ["raleigh.craigslist.org"]
    start_urls = ["http://raleigh.craigslist.org/search/bab"]
    base_url = 'http://raleigh.craigslist.org/'

    def parse(self, response):
        links = response.xpath('//a[@class="hdrlnk"]/@href').extract()
        for link in links:
            absolute_url = self.base_url + link
            yield scrapy.Request(absolute_url, callback=self.parse_attr)

    def parse_attr(self, response):
        match = re.search(r"(\w+)\.html", response.url)
        if match:
            item_id = match.group(1)
            url = self.base_url + "reply/ral/bab/" + item_id
            item = DmozItem()
            item["link"] = response.url
            item["title"] = "".join(response.xpath("//span[@class='postingtitletext']//text()").extract())
            item["tag"] = "".join(response.xpath("//p[@class='attrgroup']/span/b/text()").extract()[0])
            return scrapy.Request(url, meta={'item': item}, callback=self.parse_contact)

    def parse_contact(self, response):
        item = response.meta['item']
        item["attr"] = "".join(response.xpath("//div[@class='anonemail']//text()").extract())
        return item
This is exactly what the CloseSpider extension and its CLOSESPIDER_ITEMCOUNT setting were made for:
An integer which specifies a number of items. If the spider scrapes more than that amount of items and those items are passed by the item pipeline, the spider will be closed with the reason closespider_itemcount. If zero (or not set), spiders won't be closed by the number of passed items.
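For instance, to cap this spider at around 50 items, a minimal sketch: put the setting in your project's settings.py, or set it per spider via custom_settings (available in Scrapy 1.0+). Picking the cap with random.randint is one way to get the random limit the question asks for; the 40-60 range here is an arbitrary illustration, not something from the original question.

import random
import scrapy

class DmozSpider(scrapy.Spider):
    name = "dmoz"
    # Close the spider once this many items have passed the item pipeline.
    # Evaluated once at class-definition time, so each run gets one random cap.
    custom_settings = {
        'CLOSESPIDER_ITEMCOUNT': random.randint(40, 60),
    }

Or project-wide, in settings.py:

CLOSESPIDER_ITEMCOUNT = 50

Note that CloseSpider shuts the spider down gracefully, so requests already in flight may still yield a few items past the cap; the setting is a threshold, not an exact count.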