# -*- coding: utf-8 -*-

# Scrapy settings for elabSpider project
#
# For simplicity, this file contains only settings considered important or
# commonly used. You can find more settings consulting the documentation:
#
#     https://doc.scrapy.org/en/latest/topics/settings.html
#     https://doc.scrapy.org/en/latest/topics/downloader-middleware.html
#     https://doc.scrapy.org/en/latest/topics/spider-middleware.html

BOT_NAME = 'elabSpider'

SPIDER_MODULES = ['elabSpider.spiders']
NEWSPIDER_MODULE = 'elabSpider.spiders'

# MONGO_HOST = '139.196.5.59'  # test environment
# MONGO_HOST = '139.196.108.59'  # production environment, deprecated
MONGO_HOST = 'mongodb://logdb:logdb@dds-uf6da0fedc9881d41450-pub.mongodb.rds.aliyuncs.com:3717,dds-uf6da0fedc9881d42459-pub.mongodb.rds.aliyuncs.com:3717/logdb?replicaSet=mgset-12835903'
MONGO_PORT = 27017
MONGO_DB = 'logdb'
MONGO_COLL = 'ershoufang'
MONGO_USER = 'dbuser'
MONGO_PSW = 'elab@123'

# Abuyun dynamic HTTP proxy endpoint.
PROXY_HOST = 'http://http-dyn.abuyun.com'
PROXY_PORT = '9020'
# PROXY_IDENTIFY = 'HY39548V0FZ45UKD'
# PROXY_SECRETKEY = '07DBA6C5E470150B'

# Presumably the local cache file for the fake_useragent library.
USER_AGENT_PATH = 'fake_useragent.json'

# LOG_FILE = 'spider.log'

# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'elabSpider (+http://www.yourdomain.com)'

# Obey robots.txt rules
ROBOTSTXT_OBEY = False

# Configure maximum concurrent requests performed by Scrapy (default: 16)
#CONCURRENT_REQUESTS = 32

# Configure a delay for requests for the same website (default: 0)
# See https://doc.scrapy.org/en/latest/topics/settings.html#download-delay
# See also autothrottle settings and docs
#DOWNLOAD_DELAY = 3
# The download delay setting will honor only one of:
#CONCURRENT_REQUESTS_PER_DOMAIN = 16
CONCURRENT_REQUESTS_PER_IP = 5

# Disable cookies (enabled by default)
COOKIES_ENABLED = False
# COOKIES_DEBUG = True

# Disable Telnet Console (enabled by default)
#TELNETCONSOLE_ENABLED = False

# Override the default request headers:
#DEFAULT_REQUEST_HEADERS = {
#   'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
#   'Accept-Language': 'en',
#}

# Enable or disable spider middlewares
# See https://doc.scrapy.org/en/latest/topics/spider-middleware.html
SPIDER_MIDDLEWARES = {
    # 'elabSpider.middlewares.ElabspiderSpiderMiddleware': 543,
    # OffsiteMiddleware is a spider middleware, so it must be disabled here;
    # an entry for it under DOWNLOADER_MIDDLEWARES is silently ignored.
    'scrapy.spidermiddlewares.offsite.OffsiteMiddleware': None,
}

# Enable or disable downloader middlewares
# See https://doc.scrapy.org/en/latest/topics/downloader-middleware.html
DOWNLOADER_MIDDLEWARES = {
    'elabSpider.middlewares.UserAgent': 1,
    # 'elabSpider.middlewares.TooManyRequestsRetryMiddleware': 500,
    'elabSpider.middlewares.ElabspiderDownloaderMiddleware': 543,
    'scrapy.downloadermiddlewares.httpproxy.HttpProxyMiddleware': 550,
    'scrapy.downloadermiddlewares.useragent.UserAgentMiddleware': None,
    'scrapy.downloadermiddlewares.defaultheaders.DefaultHeadersMiddleware': None,
    # 'scrapy.downloadermiddlewares.retry.RetryMiddleware': None,
}

# Enable or disable extensions
# See https://doc.scrapy.org/en/latest/topics/extensions.html
#EXTENSIONS = {
#    'scrapy.extensions.telnet.TelnetConsole': None,
#}

# Configure item pipelines
# See https://doc.scrapy.org/en/latest/topics/item-pipeline.html
ITEM_PIPELINES = {
    'elabSpider.pipelines.ElabspiderPipeline': 300,
}
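# ElabspiderPipeline presumably consumes the MONGO_* settings above to write
# items to MongoDB. A minimal sketch of such a pipeline, kept commented out
# because it belongs in pipelines.py; the class name, attribute names and
# from_crawler wiring are illustrative assumptions, not the project's actual
# implementation:
#
#     import pymongo
#
#     class MongoPipeline(object):
#         @classmethod
#         def from_crawler(cls, crawler):
#             return cls(crawler.settings)
#
#         def __init__(self, settings):
#             # MONGO_HOST is already a full mongodb:// URI (credentials,
#             # hosts and replica set included), so it alone is enough to
#             # connect; MONGO_PORT / MONGO_USER / MONGO_PSW would only matter
#             # for the plain-IP hosts that are now commented out.
#             self.client = pymongo.MongoClient(settings.get('MONGO_HOST'))
#             db = self.client[settings.get('MONGO_DB')]
#             self.coll = db[settings.get('MONGO_COLL')]
#
#         def process_item(self, item, spider):
#             self.coll.insert_one(dict(item))
#             return item
#
#         def close_spider(self, spider):
#             self.client.close()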
# Enable and configure the AutoThrottle extension (disabled by default)
# See https://doc.scrapy.org/en/latest/topics/autothrottle.html
# AUTOTHROTTLE_ENABLED = True
# The initial download delay
# AUTOTHROTTLE_START_DELAY = 0.25
# The maximum download delay to be set in case of high latencies
# AUTOTHROTTLE_MAX_DELAY = 60
# The average number of requests Scrapy should be sending in parallel to
# each remote server
# AUTOTHROTTLE_TARGET_CONCURRENCY = 5.0
# Enable showing throttling stats for every response received:
# AUTOTHROTTLE_DEBUG = True

# Enable and configure HTTP caching (disabled by default)
# See https://doc.scrapy.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
#HTTPCACHE_ENABLED = True
#HTTPCACHE_EXPIRATION_SECS = 0
#HTTPCACHE_DIR = 'httpcache'
#HTTPCACHE_IGNORE_HTTP_CODES = []
#HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage'

# Treat redirects as failures instead of following them, and retry the status
# codes that typically indicate the target site blocked or rate-limited the
# current proxy IP. Codes listed in HTTPERROR_ALLOWED_CODES still reach the
# spider callback once the retries are exhausted.
REDIRECT_ENABLED = False
RETRY_ENABLED = True
RETRY_TIMES = 10
RETRY_HTTP_CODES = [403, 429, 404, 301, 302, 503]
HTTPERROR_ALLOWED_CODES = [403, 429, 404, 301, 302, 503]

DOWNLOAD_TIMEOUT = 15

# RANDOMIZE_DOWNLOAD_DELAY = False
# CONCURRENT_REQUESTS_PER_IP = 40
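# The PROXY_* settings above are presumably consumed by a downloader
# middleware (ElabspiderDownloaderMiddleware or similar) that routes every
# request through the Abuyun proxy. A minimal sketch, kept commented out
# because it belongs in middlewares.py; the class name and the basic-auth
# scheme built from the (currently commented-out) PROXY_IDENTIFY /
# PROXY_SECRETKEY keys are assumptions, not the project's actual code:
#
#     import base64
#
#     class AbuyunProxyMiddleware(object):
#         def __init__(self, host, port, identify, secretkey):
#             self.proxy = '{}:{}'.format(host, port)
#             token = base64.b64encode(
#                 '{}:{}'.format(identify, secretkey).encode()).decode()
#             self.auth = 'Basic ' + token
#
#         @classmethod
#         def from_crawler(cls, crawler):
#             s = crawler.settings
#             return cls(s.get('PROXY_HOST'), s.get('PROXY_PORT'),
#                        s.get('PROXY_IDENTIFY'), s.get('PROXY_SECRETKEY'))
#
#         def process_request(self, request, spider):
#             # HttpProxyMiddleware (priority 550 above) honours meta['proxy'].
#             request.meta['proxy'] = self.proxy
#             request.headers['Proxy-Authorization'] = self.auth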