settings.py

# -*- coding: utf-8 -*-

# Scrapy settings for elabSpider project
#
# For simplicity, this file contains only settings considered important or
# commonly used. You can find more settings consulting the documentation:
#
#     https://doc.scrapy.org/en/latest/topics/settings.html
#     https://doc.scrapy.org/en/latest/topics/downloader-middleware.html
#     https://doc.scrapy.org/en/latest/topics/spider-middleware.html

BOT_NAME = 'elabSpider'

SPIDER_MODULES = ['elabSpider.spiders']
NEWSPIDER_MODULE = 'elabSpider.spiders'
# MONGO_HOST = '139.196.5.59'  # test environment
# MONGO_HOST = '139.196.108.59'  # production environment, deprecated
# MONGO_HOST = 'mongodb://logdb:logdb@dds-uf6da0fedc9881d41450-pub.mongodb.rds.aliyuncs.com:3717,dds-uf6da0fedc9881d42459-pub.mongodb.rds.aliyuncs.com:3717/logdb?replicaSet=mgset-12835903'
MONGO_HOST = 'mongodb://logdb:logdb@dds-uf6da0fedc9881d41154-pub.mongodb.rds.aliyuncs.com:3717,dds-uf6da0fedc9881d42456-pub.mongodb.rds.aliyuncs.com:3717/logdb?replicaSet=mgset-12835903'
MONGO_PORT = 27017
MONGO_DB = 'logdb'
MONGO_COLL = 'ershoufang'
MONGO_USER = 'dbuser'
MONGO_PSW = 'elab@123'
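# Note: the active MONGO_HOST is a full MongoDB connection URI with credentials
# and replicaSet embedded, so MONGO_PORT/MONGO_USER/MONGO_PSW are presumably
# only relevant to the bare-IP hosts kept above for reference. A minimal
# connection sketch, assuming pymongo:
#
#     import pymongo
#     client = pymongo.MongoClient(MONGO_HOST)  # the URI form ignores MONGO_PORT
#     coll = client[MONGO_DB][MONGO_COLL]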
PROXY_HOST = 'http://http-dyn.abuyun.com'
PROXY_PORT = '9020'
# PROXY_IDENTIFY = 'HY39548V0FZ45UKD'
# PROXY_SECRETKEY = '07DBA6C5E470150B'
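# The identify/secretkey pair points at Abuyun's dynamic proxy service, which
# authenticates via HTTP Basic auth. A hedged sketch of how a downloader
# middleware could attach the proxy (illustrative names, not necessarily the
# project's actual middleware code):
#
#     from w3lib.http import basic_auth_header
#
#     def process_request(self, request, spider):
#         request.meta['proxy'] = '%s:%s' % (PROXY_HOST, PROXY_PORT)
#         request.headers['Proxy-Authorization'] = basic_auth_header(
#             PROXY_IDENTIFY, PROXY_SECRETKEY)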
USER_AGENT_PATH = 'fake_useragent.json'
# LOG_FILE = 'spider.log'

# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'elabSpider (+http://www.yourdomain.com)'

# Obey robots.txt rules
ROBOTSTXT_OBEY = False
# Configure maximum concurrent requests performed by Scrapy (default: 16)
#CONCURRENT_REQUESTS = 32

# Configure a delay for requests for the same website (default: 0)
# See https://doc.scrapy.org/en/latest/topics/settings.html#download-delay
# See also autothrottle settings and docs
#DOWNLOAD_DELAY = 3
# The download delay setting will honor only one of:
#CONCURRENT_REQUESTS_PER_DOMAIN = 16
CONCURRENT_REQUESTS_PER_IP = 5
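# With CONCURRENT_REQUESTS_PER_IP non-zero, Scrapy ignores the per-domain limit
# and enforces concurrency (and any download delay) per remote IP instead,
# which fits a setup that routes requests through rotating proxies.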
# Disable cookies (enabled by default)
COOKIES_ENABLED = False
# COOKIES_DEBUG = True

# Disable Telnet Console (enabled by default)
#TELNETCONSOLE_ENABLED = False

# Override the default request headers:
#DEFAULT_REQUEST_HEADERS = {
#   'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
#   'Accept-Language': 'en',
#}
# Enable or disable spider middlewares
# See https://doc.scrapy.org/en/latest/topics/spider-middleware.html
SPIDER_MIDDLEWARES = {
    # 'elabSpider.middlewares.ElabspiderSpiderMiddleware': 543,
    # OffsiteMiddleware is a *spider* middleware; listing it under
    # DOWNLOADER_MIDDLEWARES has no effect, so it is disabled here.
    'scrapy.spidermiddlewares.offsite.OffsiteMiddleware': None,
}
# Enable or disable downloader middlewares
# See https://doc.scrapy.org/en/latest/topics/downloader-middleware.html
DOWNLOADER_MIDDLEWARES = {
    'elabSpider.middlewares.UserAgent': 1,
    # 'elabSpider.middlewares.TooManyRequestsRetryMiddleware': 500,
    'elabSpider.middlewares.ElabspiderDownloaderMiddleware': 543,
    'scrapy.downloadermiddlewares.httpproxy.HttpProxyMiddleware': 550,
    'scrapy.downloadermiddlewares.useragent.UserAgentMiddleware': None,
    'scrapy.downloadermiddlewares.defaultheaders.DefaultHeadersMiddleware': None,
    # 'scrapy.downloadermiddlewares.retry.RetryMiddleware': None,
}
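# 'elabSpider.middlewares.UserAgent' runs first (priority 1) and the built-in
# UserAgentMiddleware is disabled, so request User-Agents presumably all come
# from the custom middleware. A minimal sketch, assuming it draws random agents
# from fake_useragent with USER_AGENT_PATH as the local cache (older
# fake_useragent releases accept a path= argument; illustrative only):
#
#     from fake_useragent import UserAgent as FakeUserAgent
#
#     class UserAgent(object):
#         def __init__(self):
#             self.ua = FakeUserAgent(path=USER_AGENT_PATH)
#
#         def process_request(self, request, spider):
#             request.headers['User-Agent'] = self.ua.random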
# Enable or disable extensions
# See https://doc.scrapy.org/en/latest/topics/extensions.html
#EXTENSIONS = {
#    'scrapy.extensions.telnet.TelnetConsole': None,
#}

# Configure item pipelines
# See https://doc.scrapy.org/en/latest/topics/item-pipeline.html
ITEM_PIPELINES = {
    'elabSpider.pipelines.ElabspiderPipeline': 300,
}
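# A minimal sketch of how ElabspiderPipeline plausibly wires the MONGO_*
# settings together, assuming pymongo (the real pipelines.py may differ):
#
#     import pymongo
#
#     class ElabspiderPipeline(object):
#         @classmethod
#         def from_crawler(cls, crawler):
#             pipeline = cls()
#             pipeline.client = pymongo.MongoClient(
#                 crawler.settings.get('MONGO_HOST'))
#             db = pipeline.client[crawler.settings.get('MONGO_DB')]
#             pipeline.coll = db[crawler.settings.get('MONGO_COLL')]
#             return pipeline
#
#         def process_item(self, item, spider):
#             self.coll.insert_one(dict(item))
#             return item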
# Enable and configure the AutoThrottle extension (disabled by default)
# See https://doc.scrapy.org/en/latest/topics/autothrottle.html
# AUTOTHROTTLE_ENABLED = True
# The initial download delay
# AUTOTHROTTLE_START_DELAY = 0.25
# The maximum download delay to be set in case of high latencies
# AUTOTHROTTLE_MAX_DELAY = 60
# The average number of requests Scrapy should be sending in parallel to
# each remote server
# AUTOTHROTTLE_TARGET_CONCURRENCY = 5.0
# Enable showing throttling stats for every response received:
# AUTOTHROTTLE_DEBUG = True

# Enable and configure HTTP caching (disabled by default)
# See https://doc.scrapy.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
#HTTPCACHE_ENABLED = True
#HTTPCACHE_EXPIRATION_SECS = 0
#HTTPCACHE_DIR = 'httpcache'
#HTTPCACHE_IGNORE_HTTP_CODES = []
#HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage'
REDIRECT_ENABLED = False
RETRY_ENABLED = True
RETRY_TIMES = 10
RETRY_HTTP_CODES = [403, 429, 404, 301, 302, 503]
HTTPERROR_ALLOWED_CODES = [403, 429, 404, 301, 302, 503]
DOWNLOAD_TIMEOUT = 15
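# With REDIRECT_ENABLED = False and 301/302 listed in RETRY_HTTP_CODES,
# redirects are never followed: they are retried up to RETRY_TIMES (with a
# dynamic proxy, typically through a fresh IP each time), and
# HTTPERROR_ALLOWED_CODES lets responses that exhaust their retries reach
# spider callbacks instead of being dropped. This is a common anti-ban pattern
# for listing sites that redirect suspected bots to a verification page.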
# RANDOMIZE_DOWNLOAD_DELAY = False
# CONCURRENT_REQUESTS_PER_IP = 40