# -*- coding:utf-8 -*-
# @Time : 2018/4/27 10:50 AM
# @Author : Swing
import scrapy
from elabSpider.items import ResoldHouseItem
import logging
import traceback
from elabSpider.email_util import send_email
  9. class RentalHouseSpider(scrapy.Spider):
  10. name = 'lfsresoldHouse'
  11. allowed_domains = [
  12. 'nb.anjuke.com'
  13. ]
  14. start_urls = [
  15. 'https://nb.anjuke.com/community/props/sale/1003094/'
  16. ]
  17. def parse(self, response):
  18. try:
  19. community_list = response.xpath('//ul[@class="m-house-list"]/li/a/@href').extract()
  20. if community_list:
  21. for community_url in community_list:
  22. yield scrapy.Request(community_url, callback=self.parse_item)
  23. except Exception as err:
  24. send_email('sjkresoldHouse get detail url error', response._url + '\n' + traceback.format_exc())
  25. logging.error('get detail url error ! url: ' + response._url + " reason: " + '-'.join(err.args))
  26. try:
  27. next_page = response.xpath(r'//div[@class="m-page"]/div[@class="multi-page"]/a[@class="aNxt"]/@href').extract_first()
  28. if next_page:
  29. yield scrapy.Request(next_page, callback=self.parse)
  30. except Exception as err:
  31. send_email('sjkresoldHouse get next page url error', response._url + '\n' + traceback.format_exc())
  32. logging.error('get next page url error ! url: ' + response._url + " reason: " + '-'.join(err.args))
  33. def parse_item(self, response):
  34. try:
  35. item = ResoldHouseItem.handle_response(response)
  36. yield item
  37. except Exception as err:
  38. send_email('sjkresoldHouse parse response error', response._url + '\n' + traceback.format_exc())
  39. logging.error('parse response error ! url: ' + response._url + " reason: " + '-'.join(err.args))