Scrapy default settings
The following are the built-in defaults that Scrapy falls back to whenever a setting is not overridden anywhere else.
import os
import sys
from os.path import join, abspath, dirname

BOT_NAME = 'scrapybot'
CLOSESPIDER_TIMEOUT = 0
CLOSESPIDER_PAGECOUNT = 0
CLOSESPIDER_ITEMCOUNT = 0
CLOSESPIDER_ERRORCOUNT = 0
COMMANDS_MODULE = ''
CONCURRENT_ITEMS = 100
CONCURRENT_REQUESTS = 16
CONCURRENT_REQUESTS_PER_DOMAIN = 8
CONCURRENT_REQUESTS_PER_IP = 0
COOKIES_ENABLED = True
COOKIES_DEBUG = False
DEFAULT_ITEM_CLASS = 'scrapy.item.Item'
DEFAULT_REQUEST_HEADERS = {
    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
    'Accept-Language': 'en',
}
DEPTH_LIMIT = 0
DEPTH_STATS = True
DEPTH_PRIORITY = 0
DNSCACHE_ENABLED = True
DOWNLOAD_DELAY = 0
DOWNLOAD_HANDLERS = {}
DOWNLOAD_HANDLERS_BASE = {
    'file': 'scrapy.core.downloader.handlers.file.FileDownloadHandler',
    'http': 'scrapy.core.downloader.handlers.http.HttpDownloadHandler',
    'https': 'scrapy.core.downloader.handlers.http.HttpDownloadHandler',
    's3': 'scrapy.core.downloader.handlers.s3.S3DownloadHandler',
}
DOWNLOAD_TIMEOUT = 180 # 3mins
DOWNLOADER_DEBUG = False
DOWNLOADER_HTTPCLIENTFACTORY = 'scrapy.core.downloader.webclient.ScrapyHTTPClientFactory'
DOWNLOADER_CLIENTCONTEXTFACTORY = 'scrapy.core.downloader.webclient.ScrapyClientContextFactory'
DOWNLOADER_MIDDLEWARES = {}
DOWNLOADER_MIDDLEWARES_BASE = {
    # Engine side
    'scrapy.contrib.downloadermiddleware.robotstxt.RobotsTxtMiddleware': 100,
    'scrapy.contrib.downloadermiddleware.httpauth.HttpAuthMiddleware': 300,
    'scrapy.contrib.downloadermiddleware.downloadtimeout.DownloadTimeoutMiddleware': 350,
    'scrapy.contrib.downloadermiddleware.useragent.UserAgentMiddleware': 400,
    'scrapy.contrib.downloadermiddleware.retry.RetryMiddleware': 500,
    'scrapy.contrib.downloadermiddleware.defaultheaders.DefaultHeadersMiddleware': 550,
    'scrapy.contrib.downloadermiddleware.redirect.RedirectMiddleware': 600,
    'scrapy.contrib.downloadermiddleware.cookies.CookiesMiddleware': 700,
    'scrapy.contrib.downloadermiddleware.httpproxy.HttpProxyMiddleware': 750,
    'scrapy.contrib.downloadermiddleware.httpcompression.HttpCompressionMiddleware': 800,
    'scrapy.contrib.downloadermiddleware.chunked.ChunkedTransferMiddleware': 830,
    'scrapy.contrib.downloadermiddleware.stats.DownloaderStats': 850,
    'scrapy.contrib.downloadermiddleware.httpcache.HttpCacheMiddleware': 900,
    # Downloader side
}
DOWNLOADER_STATS = True
DUPEFILTER_CLASS = 'scrapy.dupefilter.RFPDupeFilter'
# Use the EDITOR environment variable if set; otherwise fall back to a platform default
try:
    EDITOR = os.environ['EDITOR']
except KeyError:
    if sys.platform == 'win32':
        EDITOR = '%s -m idlelib.idle'
    else:
        EDITOR = 'vi'
EXTENSIONS = {}
EXTENSIONS_BASE = {
    'scrapy.contrib.corestats.CoreStats': 0,
    'scrapy.webservice.WebService': 0,
    'scrapy.telnet.TelnetConsole': 0,
    'scrapy.contrib.memusage.MemoryUsage': 0,
    'scrapy.contrib.memdebug.MemoryDebugger': 0,
    'scrapy.contrib.closespider.CloseSpider': 0,
    'scrapy.contrib.feedexport.FeedExporter': 0,
    'scrapy.contrib.logstats.LogStats': 0,
    'scrapy.contrib.spiderstate.SpiderState': 0,
    'scrapy.contrib.throttle.AutoThrottle': 0,
}
FEED_URI = None
FEED_URI_PARAMS = None # a function to extend uri arguments
FEED_FORMAT = 'jsonlines'
FEED_STORE_EMPTY = False
FEED_STORAGES = {}
FEED_STORAGES_BASE = {
    '': 'scrapy.contrib.feedexport.FileFeedStorage',
    'file': 'scrapy.contrib.feedexport.FileFeedStorage',
    'stdout': 'scrapy.contrib.feedexport.StdoutFeedStorage',
    's3': 'scrapy.contrib.feedexport.S3FeedStorage',
    'ftp': 'scrapy.contrib.feedexport.FTPFeedStorage',
}
FEED_EXPORTERS = {}
FEED_EXPORTERS_BASE = {
    'json': 'scrapy.contrib.exporter.JsonItemExporter',
    'jsonlines': 'scrapy.contrib.exporter.JsonLinesItemExporter',
    'csv': 'scrapy.contrib.exporter.CsvItemExporter',
    'xml': 'scrapy.contrib.exporter.XmlItemExporter',
    'marshal': 'scrapy.contrib.exporter.MarshalItemExporter',
    'pickle': 'scrapy.contrib.exporter.PickleItemExporter',
}
HTTPCACHE_ENABLED = False
HTTPCACHE_DIR = 'httpcache'
HTTPCACHE_IGNORE_MISSING = False
HTTPCACHE_STORAGE = 'scrapy.contrib.httpcache.DbmCacheStorage'
HTTPCACHE_EXPIRATION_SECS = 0
HTTPCACHE_IGNORE_HTTP_CODES = []
HTTPCACHE_IGNORE_SCHEMES = ['file']
HTTPCACHE_DBM_MODULE = 'anydbm'
ITEM_PROCESSOR = 'scrapy.contrib.pipeline.ItemPipelineManager'
# Item pipelines are typically set in specific commands settings
ITEM_PIPELINES = []
LOG_ENABLED = True
LOG_ENCODING = 'utf-8'
LOG_FORMATTER = 'scrapy.logformatter.LogFormatter'
LOG_STDOUT = False
LOG_LEVEL = 'DEBUG'
LOG_FILE = None
LOG_UNSERIALIZABLE_REQUESTS = False
LOGSTATS_INTERVAL = 60.0
MAIL_DEBUG = False
MAIL_HOST = 'localhost'
MAIL_PORT = 25
MAIL_FROM = 'scrapy@localhost'
MAIL_PASS = None
MAIL_USER = None
MEMDEBUG_ENABLED = False # enable memory debugging
MEMDEBUG_NOTIFY = [] # send memory debugging report by mail at engine shutdown
MEMUSAGE_ENABLED = False
MEMUSAGE_LIMIT_MB = 0
MEMUSAGE_NOTIFY_MAIL = []
MEMUSAGE_REPORT = False
MEMUSAGE_WARNING_MB = 0
NEWSPIDER_MODULE = ''
RANDOMIZE_DOWNLOAD_DELAY = True
REDIRECT_ENABLED = True
REDIRECT_MAX_METAREFRESH_DELAY = 100
REDIRECT_MAX_TIMES = 20 # uses Firefox default setting
REDIRECT_PRIORITY_ADJUST = +2
REFERER_ENABLED = True
RETRY_ENABLED = True
RETRY_TIMES = 2 # initial response + 2 retries = 3 requests
RETRY_HTTP_CODES = [500, 503, 504, 400, 408]
RETRY_PRIORITY_ADJUST = -1
ROBOTSTXT_OBEY = False
SCHEDULER = 'scrapy.core.scheduler.Scheduler'
SCHEDULER_DISK_QUEUE = 'scrapy.squeue.PickleLifoDiskQueue'
SCHEDULER_MEMORY_QUEUE = 'scrapy.squeue.LifoMemoryQueue'
SPIDER_MANAGER_CLASS = 'scrapy.spidermanager.SpiderManager'
SPIDER_MIDDLEWARES = {}
SPIDER_MIDDLEWARES_BASE = {
    # Engine side
    'scrapy.contrib.spidermiddleware.httperror.HttpErrorMiddleware': 50,
    'scrapy.contrib.spidermiddleware.offsite.OffsiteMiddleware': 500,
    'scrapy.contrib.spidermiddleware.referer.RefererMiddleware': 700,
    'scrapy.contrib.spidermiddleware.urllength.UrlLengthMiddleware': 800,
    'scrapy.contrib.spidermiddleware.depth.DepthMiddleware': 900,
    # Spider side
}
SPIDER_MODULES = []
STATS_CLASS = 'scrapy.statscol.MemoryStatsCollector'
STATS_DUMP = True
STATSMAILER_RCPTS = []
TEMPLATES_DIR = abspath(join(dirname(__file__), '..', 'templates'))
URLLENGTH_LIMIT = 2083
USER_AGENT = 'Scrapy/%s (+http://scrapy.org)' % __import__('scrapy').__version__
TELNETCONSOLE_ENABLED = 1
TELNETCONSOLE_PORT = [6023, 6073]
TELNETCONSOLE_HOST = '0.0.0.0'
WEBSERVICE_ENABLED = True
WEBSERVICE_LOGFILE = None
WEBSERVICE_PORT = [6080, 7030]
WEBSERVICE_HOST = '0.0.0.0'
WEBSERVICE_RESOURCES = {}
WEBSERVICE_RESOURCES_BASE = {
    'scrapy.contrib.webservice.crawler.CrawlerResource': 1,
    'scrapy.contrib.webservice.enginestatus.EngineStatusResource': 1,
    'scrapy.contrib.webservice.stats.StatsResource': 1,
}
SPIDER_CONTRACTS = {}
SPIDER_CONTRACTS_BASE = {
    'scrapy.contracts.default.UrlContract': 1,
    'scrapy.contracts.default.ReturnsContract': 2,
    'scrapy.contracts.default.ScrapesContract': 3,
}
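
Any of the defaults listed above can be overridden: Scrapy resolves a setting roughly in the order command line (-s NAME=value), then the project's settings.py, then these built-in defaults. The sketch below shows a few typical project-level overrides; it assumes a hypothetical project named myproject, and the values are only illustrative, not recommendations.

# myproject/settings.py -- each assignment shadows the built-in default of the same name
BOT_NAME = 'myproject'                  # default: 'scrapybot'
SPIDER_MODULES = ['myproject.spiders']  # default: []
NEWSPIDER_MODULE = 'myproject.spiders'  # default: ''
DOWNLOAD_DELAY = 0.5                    # default: 0 (no pause between requests)
CONCURRENT_REQUESTS_PER_DOMAIN = 4      # default: 8
RETRY_TIMES = 5                         # default: 2 (i.e. up to 3 requests in total)
ROBOTSTXT_OBEY = True                   # default: False

Note that the *_BASE dictionaries above (middlewares, extensions, feed storages, exporters, web-service resources) list Scrapy's own components and are not meant to be edited directly; a project adds or disables components through the matching empty dictionaries (DOWNLOADER_MIDDLEWARES, SPIDER_MIDDLEWARES, EXTENSIONS, and so on), which are merged with the base ones and sorted by their numeric order, with a value of None disabling a component.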