<!DOCTYPE HTML>
<html lang="zh-Hans">
    <!-- Start book Python爬虫课程讲义 -->
    <head>
        <!-- head:start -->
        <meta charset="UTF-8">
        <meta http-equiv="X-UA-Compatible" content="IE=edge" />
        <title>CrawlSpiders | Python爬虫课程讲义</title>
        <meta content="text/html; charset=utf-8" http-equiv="Content-Type">
        <meta name="description" content="">
        <meta name="generator" content="GitBook 2.6.7">
        <meta name="author" content="BigCat">
        
        <meta name="HandheldFriendly" content="true"/>
        <meta name="viewport" content="width=device-width, initial-scale=1">
        <meta name="apple-mobile-web-app-capable" content="yes">
        <meta name="apple-mobile-web-app-status-bar-style" content="black">
        <link rel="apple-touch-icon-precomposed" sizes="152x152" href="../../gitbook/images/apple-touch-icon-precomposed-152.png">
        <link rel="shortcut icon" href="../../gitbook/images/favicon.ico" type="image/x-icon">
        
    <link rel="stylesheet" href="../../gitbook/style.css">
    
        
        <link rel="stylesheet" href="../../gitbook/plugins/gitbook-plugin-tbfed-pagefooter/footer.css">
        
    
        
        <link rel="stylesheet" href="../../gitbook/plugins/gitbook-plugin-splitter/splitter.css">
        
    
        
        <link rel="stylesheet" href="../../gitbook/plugins/gitbook-plugin-toggle-chapters/toggle.css">
        
    
        
        <link rel="stylesheet" href="../../gitbook/plugins/gitbook-plugin-highlight/website.css">
        
    
        
        <link rel="stylesheet" href="../../gitbook/plugins/gitbook-plugin-fontsettings/website.css">
        
    
    

        
    
    
    <link rel="next" href="../../file/part04/4.7.html" />
    
    
    <link rel="prev" href="../../file/part04/4.5.html" />
    

        <!-- head:end -->
    </head>
    <body>
        <!-- body:start -->
        
    <div class="book"
        data-level="4.6"
        data-chapter-title="CrawlSpiders"
        data-filepath="file/part04/4.6.md"
        data-basepath="../.."
        data-revision="Thu Feb 09 2017 09:48:59 GMT+0800 (CST)"
        data-innerlanguage="">
    

<div class="book-summary">
    <nav role="navigation">
        <ul class="summary">
            
            
            
            

            

            
    
        <li class="chapter " data-level="0" data-path="index.html">
            
                
                    <a href="../../index.html">
                
                        <i class="fa fa-check"></i>
                        
                        传智播客Python学院爬虫课程
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="1" data-path="file/part01/1.html">
            
                
                    <a href="../../file/part01/1.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>1.</b>
                        
                        爬虫原理与数据抓取
                    </a>
            
            
            <ul class="articles">
                
    
        <li class="chapter " data-level="1.1" data-path="file/part01/1.1.html">
            
                
                    <a href="../../file/part01/1.1.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>1.1.</b>
                        
                        (了解)通用爬虫和聚焦爬虫
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="1.2" data-path="file/part01/1.2.html">
            
                
                    <a href="../../file/part01/1.2.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>1.2.</b>
                        
                        (复习)HTTP/HTTPS的请求与响应
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="1.3" data-path="file/part01/1.3.html">
            
                
                    <a href="../../file/part01/1.3.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>1.3.</b>
                        
                        HTTP/HTTPS抓包工具-Fiddler
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="1.4" data-path="file/part01/1.4.html">
            
                
                    <a href="../../file/part01/1.4.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>1.4.</b>
                        
                        urllib2模块的基本使用
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="1.5" data-path="file/part01/1.5.html">
            
                
                    <a href="../../file/part01/1.5.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>1.5.</b>
                        
                        urllib2：GET请求和POST请求
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="1.6" data-path="file/part01/1.6.html">
            
                
                    <a href="../../file/part01/1.6.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>1.6.</b>
                        
                        urllib2：Handler处理器和自定义Opener
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="1.7" data-path="file/part01/1.7.html">
            
                
                    <a href="../../file/part01/1.7.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>1.7.</b>
                        
                        urllib2：URLError与HTTPError
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="1.8" data-path="file/part01/1.8.html">
            
                
                    <a href="../../file/part01/1.8.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>1.8.</b>
                        
                        Requests模块
                    </a>
            
            
        </li>
    

            </ul>
            
        </li>
    
        <li class="chapter " data-level="2" data-path="file/part02/2.html">
            
                
                    <a href="../../file/part02/2.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>2.</b>
                        
                        非结构化数据与结构化数据提取
                    </a>
            
            
            <ul class="articles">
                
    
        <li class="chapter " data-level="2.1" data-path="file/part02/2.1.html">
            
                
                    <a href="../../file/part02/2.1.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>2.1.</b>
                        
                        正则表达式re模块
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="2.2" data-path="file/part02/2.2.html">
            
                
                    <a href="../../file/part02/2.2.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>2.2.</b>
                        
                        案例：使用正则表达式的爬虫
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="2.3" data-path="file/part02/2.3.html">
            
                
                    <a href="../../file/part02/2.3.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>2.3.</b>
                        
                        XPath与lxml类库
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="2.4" data-path="file/part02/2.4.html">
            
                
                    <a href="../../file/part02/2.4.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>2.4.</b>
                        
                        案例：使用XPath的爬虫
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="2.5" data-path="file/part02/2.5.html">
            
                
                    <a href="../../file/part02/2.5.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>2.5.</b>
                        
                        BeautifulSoup4 解析器
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="2.6" data-path="file/part02/2.6.html">
            
                
                    <a href="../../file/part02/2.6.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>2.6.</b>
                        
                        案例：使用bs4的爬虫
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="2.7" data-path="file/part02/2.7.html">
            
                
                    <a href="../../file/part02/2.7.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>2.7.</b>
                        
                        JSON模块与JsonPath
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="2.8" data-path="file/part02/2.8.html">
            
                
                    <a href="../../file/part02/2.8.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>2.8.</b>
                        
                        糗事百科案例
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="2.9" data-path="file/part02/2.9.html">
            
                
                    <a href="../../file/part02/2.9.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>2.9.</b>
                        
                        多线程爬虫案例
                    </a>
            
            
        </li>
    

            </ul>
            
        </li>
    
        <li class="chapter " data-level="3" data-path="file/part03/3.html">
            
                
                    <a href="../../file/part03/3.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>3.</b>
                        
                        动态HTML处理和机器图像识别
                    </a>
            
            
            <ul class="articles">
                
    
        <li class="chapter " data-level="3.1" data-path="file/part03/3.1.html">
            
                
                    <a href="../../file/part03/3.1.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>3.1.</b>
                        
                        动态HTML介绍
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="3.2" data-path="file/part03/3.2.html">
            
                
                    <a href="../../file/part03/3.2.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>3.2.</b>
                        
                        Selenium与PhantomJS
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="3.3" data-path="file/part03/3.3.html">
            
                
                    <a href="../../file/part03/3.3.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>3.3.</b>
                        
                        案例一：网站模拟登录
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="3.4" data-path="file/part03/3.4.html">
            
                
                    <a href="../../file/part03/3.4.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>3.4.</b>
                        
                        案例二：动态页面模拟点击
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="3.5" data-path="file/part03/3.5.html">
            
                
                    <a href="../../file/part03/3.5.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>3.5.</b>
                        
                        案例三：执行JavaScript语句
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="3.6" data-path="file/part03/3.6.html">
            
                
                    <a href="../../file/part03/3.6.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>3.6.</b>
                        
                        机器视觉与Tesseract介绍
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="3.7" data-path="file/part03/3.7.html">
            
                
                    <a href="../../file/part03/3.7.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>3.7.</b>
                        
                        处理一些格式规范的文字
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="3.8" data-path="file/part03/3.8.html">
            
                
                    <a href="../../file/part03/3.8.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>3.8.</b>
                        
                        案例：尝试对验证码进行机器识别处理
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="3.9" data-path="file/part03/3.9.html">
            
                
                    <a href="../../file/part03/3.9.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>3.9.</b>
                        
                        机器学习：训练Tesseract
                    </a>
            
            
        </li>
    

            </ul>
            
        </li>
    
        <li class="chapter " data-level="4" data-path="file/part04/4.html">
            
                
                    <a href="../../file/part04/4.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>4.</b>
                        
                        Scrapy框架
                    </a>
            
            
            <ul class="articles">
                
    
        <li class="chapter " data-level="4.1" data-path="file/part04/4.1.html">
            
                
                    <a href="../../file/part04/4.1.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>4.1.</b>
                        
                        配置安装
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="4.2" data-path="file/part04/4.2.html">
            
                
                    <a href="../../file/part04/4.2.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>4.2.</b>
                        
                        入门案例
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="4.3" data-path="file/part04/4.3.html">
            
                
                    <a href="../../file/part04/4.3.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>4.3.</b>
                        
                        Scrapy Shell
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="4.4" data-path="file/part04/4.4.html">
            
                
                    <a href="../../file/part04/4.4.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>4.4.</b>
                        
                        Item Pipeline
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="4.5" data-path="file/part04/4.5.html">
            
                
                    <a href="../../file/part04/4.5.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>4.5.</b>
                        
                        Spiders
                    </a>
            
            
        </li>
    
        <li class="chapter active" data-level="4.6" data-path="file/part04/4.6.html">
            
                
                    <a href="../../file/part04/4.6.html" aria-current="page">
                
                        <i class="fa fa-check"></i>
                        
                            <b>4.6.</b>
                        
                        CrawlSpiders
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="4.7" data-path="file/part04/4.7.html">
            
                
                    <a href="../../file/part04/4.7.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>4.7.</b>
                        
                        Request/Response
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="4.8" data-path="file/part04/4.8.html">
            
                
                    <a href="../../file/part04/4.8.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>4.8.</b>
                        
                        Downloader Middlewares
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="4.9" data-path="file/part04/4.9.html">
            
                
                    <a href="../../file/part04/4.9.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>4.9.</b>
                        
                        Settings
                    </a>
            
            
        </li>
    

            </ul>
            
        </li>
    
        <li class="chapter " data-level="5" data-path="file/part05/5.html">
            
                
                    <a href="../../file/part05/5.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>5.</b>
                        
                        Scrapy实战项目
                    </a>
            
            
            <ul class="articles">
                
    
        <li class="chapter " data-level="5.1" data-path="file/part05/5.1.html">
            
                
                    <a href="../../file/part05/5.1.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>5.1.</b>
                        
                        (案例一)手机App抓包爬虫
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="5.2" data-path="file/part05/5.2.html">
            
                
                    <a href="../../file/part05/5.2.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>5.2.</b>
                        
                        (案例二)阳光热线问政平台爬虫
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="5.3" data-path="file/part05/5.3.html">
            
                
                    <a href="../../file/part05/5.3.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>5.3.</b>
                        
                        (案例三)新浪网分类资讯爬虫
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="5.4" data-path="file/part05/5.4.html">
            
                
                    <a href="../../file/part05/5.4.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>5.4.</b>
                        
                        (案例四)图片下载器爬虫
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="5.5" data-path="file/part05/5.5.html">
            
                
                    <a href="../../file/part05/5.5.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>5.5.</b>
                        
                        (案例五)将数据保存在MongoDB中
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="5.6" data-path="file/part05/5.6.html">
            
                
                    <a href="../../file/part05/5.6.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>5.6.</b>
                        
                        (案例六)三种scrapy模拟登陆策略
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="5.7" data-path="file/part05/5.7.html">
            
                
                    <a href="../../file/part05/5.7.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>5.7.</b>
                        
                        附：通过Fiddler进行手机抓包方法
                    </a>
            
            
        </li>
    

            </ul>
            
        </li>
    
        <li class="chapter " data-level="6" data-path="file/part06/6.html">
            
                
                    <a href="../../file/part06/6.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>6.</b>
                        
                        scrapy-redis分布式组件
                    </a>
            
            
            <ul class="articles">
                
    
        <li class="chapter " data-level="6.1" data-path="file/part06/6.1.html">
            
                
                    <a href="../../file/part06/6.1.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>6.1.</b>
                        
                        源码分析参考：Connection
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="6.2" data-path="file/part06/6.2.html">
            
                
                    <a href="../../file/part06/6.2.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>6.2.</b>
                        
                        源码分析参考：Dupefilter
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="6.3" data-path="file/part06/6.3.html">
            
                
                    <a href="../../file/part06/6.3.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>6.3.</b>
                        
                        源码分析参考：Picklecompat
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="6.4" data-path="file/part06/6.4.html">
            
                
                    <a href="../../file/part06/6.4.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>6.4.</b>
                        
                        源码分析参考：Pipelines
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="6.5" data-path="file/part06/6.5.html">
            
                
                    <a href="../../file/part06/6.5.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>6.5.</b>
                        
                        源码分析参考：Queue
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="6.6" data-path="file/part06/6.6.html">
            
                
                    <a href="../../file/part06/6.6.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>6.6.</b>
                        
                        源码分析参考：Scheduler
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="6.7" data-path="file/part06/6.7.html">
            
                
                    <a href="../../file/part06/6.7.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>6.7.</b>
                        
                        源码分析参考：Spider
                    </a>
            
            
        </li>
    

            </ul>
            
        </li>
    
        <li class="chapter " data-level="7" data-path="file/part07/7.html">
            
                
                    <a href="../../file/part07/7.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>7.</b>
                        
                        scrapy-redis实战
                    </a>
            
            
            <ul class="articles">
                
    
        <li class="chapter " data-level="7.1" data-path="file/part07/7.1.html">
            
                
                    <a href="../../file/part07/7.1.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>7.1.</b>
                        
                        源码自带项目说明
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="7.2" data-path="file/part07/7.2.html">
            
                
                    <a href="../../file/part07/7.2.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>7.2.</b>
                        
                        有缘网分布式爬虫项目1
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="7.3" data-path="file/part07/7.3.html">
            
                
                    <a href="../../file/part07/7.3.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>7.3.</b>
                        
                        有缘网分布式爬虫项目2
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="7.4" data-path="file/part07/7.4.html">
            
                
                    <a href="../../file/part07/7.4.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>7.4.</b>
                        
                        处理Redis里的数据
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="7.5" data-path="file/part07/7.5.html">
            
                
                    <a href="../../file/part07/7.5.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>7.5.</b>
                        
                        尝试改写新浪网分类资讯爬虫1
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="7.6" data-path="file/part07/7.6.html">
            
                
                    <a href="../../file/part07/7.6.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>7.6.</b>
                        
                        尝试改写新浪网分类资讯爬虫2
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="7.7" data-path="file/part07/7.7.html">
            
                
                    <a href="../../file/part07/7.7.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>7.7.</b>
                        
                        IT桔子分布式项目1
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="7.8" data-path="file/part07/7.8.html">
            
                
                    <a href="../../file/part07/7.8.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>7.8.</b>
                        
                        IT桔子分布式项目2
                    </a>
            
            
        </li>
    

            </ul>
            
        </li>
    
        <li class="chapter " data-level="8" data-path="file/duanzi/duanzi.html">
            
                
                    <a href="../../file/duanzi/duanzi.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>8.</b>
                        
                        课余段子
                    </a>
            
            
        </li>
    


            
            <li class="divider"></li>
            <li>
                <a href="https://www.gitbook.com" target="_blank" rel="noopener noreferrer" class="gitbook-link">
                    Published with GitBook
                </a>
            </li>
            
        </ul>
    </nav>
</div>

    <div class="book-body">
        <div class="body-inner">
            <div class="book-header" role="navigation">
    <!-- Actions Left -->
    

    <!-- Title -->
    <h1>
        <i class="fa fa-circle-o-notch fa-spin"></i>
        <a href="../../" >Python爬虫课程讲义</a>
    </h1>
</div>

            <div class="page-wrapper" tabindex="-1" role="main">
                <div class="page-inner">
                
                
                    <section class="normal" id="section-">
                    
                        <h1 id="crawlspiders">CrawlSpiders</h1>
<blockquote>
<p>&#x901A;&#x8FC7;&#x4E0B;&#x9762;&#x7684;&#x547D;&#x4EE4;&#x53EF;&#x4EE5;&#x5FEB;&#x901F;&#x521B;&#x5EFA; CrawlSpider&#x6A21;&#x677F; &#x7684;&#x4EE3;&#x7801;&#xFF1A;</p>
<p><code>scrapy genspider -t crawl tencent tencent.com</code></p>
</blockquote>
<p>&#x4E0A;&#x4E00;&#x4E2A;&#x6848;&#x4F8B;&#x4E2D;&#xFF0C;&#x6211;&#x4EEC;&#x901A;&#x8FC7;&#x6B63;&#x5219;&#x8868;&#x8FBE;&#x5F0F;&#xFF0C;&#x5236;&#x4F5C;&#x4E86;&#x65B0;&#x7684;url&#x4F5C;&#x4E3A;Request&#x8BF7;&#x6C42;&#x53C2;&#x6570;&#xFF0C;&#x73B0;&#x5728;&#x6211;&#x4EEC;&#x53EF;&#x4EE5;&#x6362;&#x4E2A;&#x82B1;&#x6837;...</p>
<p><code>class scrapy.spiders.CrawlSpider</code></p>
<p>&#x5B83;&#x662F;Spider&#x7684;&#x6D3E;&#x751F;&#x7C7B;&#xFF0C;Spider&#x7C7B;&#x7684;&#x8BBE;&#x8BA1;&#x539F;&#x5219;&#x662F;&#x53EA;&#x722C;&#x53D6;start_url&#x5217;&#x8868;&#x4E2D;&#x7684;&#x7F51;&#x9875;&#xFF0C;&#x800C;CrawlSpider&#x7C7B;&#x5B9A;&#x4E49;&#x4E86;&#x4E00;&#x4E9B;&#x89C4;&#x5219;(rule)&#x6765;&#x63D0;&#x4F9B;&#x8DDF;&#x8FDB;link&#x7684;&#x65B9;&#x4FBF;&#x7684;&#x673A;&#x5236;&#xFF0C;&#x4ECE;&#x722C;&#x53D6;&#x7684;&#x7F51;&#x9875;&#x4E2D;&#x83B7;&#x53D6;link&#x5E76;&#x7EE7;&#x7EED;&#x722C;&#x53D6;&#x7684;&#x5DE5;&#x4F5C;&#x66F4;&#x9002;&#x5408;&#x3002;</p>
<h4 id="&#x6E90;&#x7801;&#x53C2;&#x8003;">&#x6E90;&#x7801;&#x53C2;&#x8003;</h4>
<pre><code class="lang-python"><span class="hljs-class"><span class="hljs-keyword">class</span> <span class="hljs-title">CrawlSpider</span><span class="hljs-params">(Spider)</span>:</span>
    rules = ()
    <span class="hljs-function"><span class="hljs-keyword">def</span> <span class="hljs-title">__init__</span><span class="hljs-params">(self, *a, **kw)</span>:</span>
        super(CrawlSpider, self).__init__(*a, **kw)
        self._compile_rules()

    <span class="hljs-comment">#&#x9996;&#x5148;&#x8C03;&#x7528;parse()&#x6765;&#x5904;&#x7406;start_urls&#x4E2D;&#x8FD4;&#x56DE;&#x7684;response&#x5BF9;&#x8C61;</span>
    <span class="hljs-comment">#parse()&#x5219;&#x5C06;&#x8FD9;&#x4E9B;response&#x5BF9;&#x8C61;&#x4F20;&#x9012;&#x7ED9;&#x4E86;_parse_response()&#x51FD;&#x6570;&#x5904;&#x7406;&#xFF0C;&#x5E76;&#x8BBE;&#x7F6E;&#x56DE;&#x8C03;&#x51FD;&#x6570;&#x4E3A;parse_start_url()</span>
    <span class="hljs-comment">#&#x8BBE;&#x7F6E;&#x4E86;&#x8DDF;&#x8FDB;&#x6807;&#x5FD7;&#x4F4D;True</span>
    <span class="hljs-comment">#parse&#x5C06;&#x8FD4;&#x56DE;item&#x548C;&#x8DDF;&#x8FDB;&#x4E86;&#x7684;Request&#x5BF9;&#x8C61;    </span>
    <span class="hljs-function"><span class="hljs-keyword">def</span> <span class="hljs-title">parse</span><span class="hljs-params">(self, response)</span>:</span>
        <span class="hljs-keyword">return</span> self._parse_response(response, self.parse_start_url, cb_kwargs={}, follow=<span class="hljs-keyword">True</span>)

    <span class="hljs-comment">#&#x5904;&#x7406;start_url&#x4E2D;&#x8FD4;&#x56DE;&#x7684;response&#xFF0C;&#x9700;&#x8981;&#x91CD;&#x5199;</span>
    <span class="hljs-function"><span class="hljs-keyword">def</span> <span class="hljs-title">parse_start_url</span><span class="hljs-params">(self, response)</span>:</span>
        <span class="hljs-keyword">return</span> []

    <span class="hljs-function"><span class="hljs-keyword">def</span> <span class="hljs-title">process_results</span><span class="hljs-params">(self, response, results)</span>:</span>
        <span class="hljs-keyword">return</span> results

    <span class="hljs-comment">#&#x4ECE;response&#x4E2D;&#x62BD;&#x53D6;&#x7B26;&#x5408;&#x4EFB;&#x4E00;&#x7528;&#x6237;&#x5B9A;&#x4E49;&apos;&#x89C4;&#x5219;&apos;&#x7684;&#x94FE;&#x63A5;&#xFF0C;&#x5E76;&#x6784;&#x9020;&#x6210;Resquest&#x5BF9;&#x8C61;&#x8FD4;&#x56DE;</span>
    <span class="hljs-function"><span class="hljs-keyword">def</span> <span class="hljs-title">_requests_to_follow</span><span class="hljs-params">(self, response)</span>:</span>
        <span class="hljs-keyword">if</span> <span class="hljs-keyword">not</span> isinstance(response, HtmlResponse):
            <span class="hljs-keyword">return</span>
        seen = set()
        <span class="hljs-comment">#&#x62BD;&#x53D6;&#x4E4B;&#x5185;&#x7684;&#x6240;&#x6709;&#x94FE;&#x63A5;&#xFF0C;&#x53EA;&#x8981;&#x901A;&#x8FC7;&#x4EFB;&#x610F;&#x4E00;&#x4E2A;&apos;&#x89C4;&#x5219;&apos;&#xFF0C;&#x5373;&#x8868;&#x793A;&#x5408;&#x6CD5;</span>
        <span class="hljs-keyword">for</span> n, rule <span class="hljs-keyword">in</span> enumerate(self._rules):
            links = [l <span class="hljs-keyword">for</span> l <span class="hljs-keyword">in</span> rule.link_extractor.extract_links(response) <span class="hljs-keyword">if</span> l <span class="hljs-keyword">not</span> <span class="hljs-keyword">in</span> seen]
            <span class="hljs-comment">#&#x4F7F;&#x7528;&#x7528;&#x6237;&#x6307;&#x5B9A;&#x7684;process_links&#x5904;&#x7406;&#x6BCF;&#x4E2A;&#x8FDE;&#x63A5;</span>
            <span class="hljs-keyword">if</span> links <span class="hljs-keyword">and</span> rule.process_links:
                links = rule.process_links(links)
            <span class="hljs-comment">#&#x5C06;&#x94FE;&#x63A5;&#x52A0;&#x5165;seen&#x96C6;&#x5408;&#xFF0C;&#x4E3A;&#x6BCF;&#x4E2A;&#x94FE;&#x63A5;&#x751F;&#x6210;Request&#x5BF9;&#x8C61;&#xFF0C;&#x5E76;&#x8BBE;&#x7F6E;&#x56DE;&#x8C03;&#x51FD;&#x6570;&#x4E3A;_repsonse_downloaded()</span>
            <span class="hljs-keyword">for</span> link <span class="hljs-keyword">in</span> links:
                seen.add(link)
                <span class="hljs-comment">#&#x6784;&#x9020;Request&#x5BF9;&#x8C61;&#xFF0C;&#x5E76;&#x5C06;Rule&#x89C4;&#x5219;&#x4E2D;&#x5B9A;&#x4E49;&#x7684;&#x56DE;&#x8C03;&#x51FD;&#x6570;&#x4F5C;&#x4E3A;&#x8FD9;&#x4E2A;Request&#x5BF9;&#x8C61;&#x7684;&#x56DE;&#x8C03;&#x51FD;&#x6570;</span>
                r = Request(url=link.url, callback=self._response_downloaded)
                r.meta.update(rule=n, link_text=link.text)
                <span class="hljs-comment">#&#x5BF9;&#x6BCF;&#x4E2A;Request&#x8C03;&#x7528;process_request()&#x51FD;&#x6570;&#x3002;&#x8BE5;&#x51FD;&#x6570;&#x9ED8;&#x8BA4;&#x4E3A;indentify&#xFF0C;&#x5373;&#x4E0D;&#x505A;&#x4EFB;&#x4F55;&#x5904;&#x7406;&#xFF0C;&#x76F4;&#x63A5;&#x8FD4;&#x56DE;&#x8BE5;Request.</span>
                <span class="hljs-keyword">yield</span> rule.process_request(r)

    <span class="hljs-comment">#&#x5904;&#x7406;&#x901A;&#x8FC7;rule&#x63D0;&#x53D6;&#x51FA;&#x7684;&#x8FDE;&#x63A5;&#xFF0C;&#x5E76;&#x8FD4;&#x56DE;item&#x4EE5;&#x53CA;request</span>
    <span class="hljs-function"><span class="hljs-keyword">def</span> <span class="hljs-title">_response_downloaded</span><span class="hljs-params">(self, response)</span>:</span>
        rule = self._rules[response.meta[<span class="hljs-string">&apos;rule&apos;</span>]]
        <span class="hljs-keyword">return</span> self._parse_response(response, rule.callback, rule.cb_kwargs, rule.follow)

    <span class="hljs-comment">#&#x89E3;&#x6790;response&#x5BF9;&#x8C61;&#xFF0C;&#x4F1A;&#x7528;callback&#x89E3;&#x6790;&#x5904;&#x7406;&#x4ED6;&#xFF0C;&#x5E76;&#x8FD4;&#x56DE;request&#x6216;Item&#x5BF9;&#x8C61;</span>
    <span class="hljs-function"><span class="hljs-keyword">def</span> <span class="hljs-title">_parse_response</span><span class="hljs-params">(self, response, callback, cb_kwargs, follow=True)</span>:</span>
        <span class="hljs-comment">#&#x9996;&#x5148;&#x5224;&#x65AD;&#x662F;&#x5426;&#x8BBE;&#x7F6E;&#x4E86;&#x56DE;&#x8C03;&#x51FD;&#x6570;&#x3002;&#xFF08;&#x8BE5;&#x56DE;&#x8C03;&#x51FD;&#x6570;&#x53EF;&#x80FD;&#x662F;rule&#x4E2D;&#x7684;&#x89E3;&#x6790;&#x51FD;&#x6570;&#xFF0C;&#x4E5F;&#x53EF;&#x80FD;&#x662F; parse_start_url&#x51FD;&#x6570;&#xFF09;</span>
        <span class="hljs-comment">#&#x5982;&#x679C;&#x8BBE;&#x7F6E;&#x4E86;&#x56DE;&#x8C03;&#x51FD;&#x6570;&#xFF08;parse_start_url()&#xFF09;&#xFF0C;&#x90A3;&#x4E48;&#x9996;&#x5148;&#x7528;parse_start_url()&#x5904;&#x7406;response&#x5BF9;&#x8C61;&#xFF0C;</span>
        <span class="hljs-comment">#&#x7136;&#x540E;&#x518D;&#x4EA4;&#x7ED9;process_results&#x5904;&#x7406;&#x3002;&#x8FD4;&#x56DE;cb_res&#x7684;&#x4E00;&#x4E2A;&#x5217;&#x8868;</span>
        <span class="hljs-keyword">if</span> callback:
            <span class="hljs-comment">#&#x5982;&#x679C;&#x662F;parse&#x8C03;&#x7528;&#x7684;&#xFF0C;&#x5219;&#x4F1A;&#x89E3;&#x6790;&#x6210;Request&#x5BF9;&#x8C61;</span>
            <span class="hljs-comment">#&#x5982;&#x679C;&#x662F;rule callback&#xFF0C;&#x5219;&#x4F1A;&#x89E3;&#x6790;&#x6210;Item</span>
            cb_res = callback(response, **cb_kwargs) <span class="hljs-keyword">or</span> ()
            cb_res = self.process_results(response, cb_res)
            <span class="hljs-keyword">for</span> requests_or_item <span class="hljs-keyword">in</span> iterate_spider_output(cb_res):
                <span class="hljs-keyword">yield</span> requests_or_item

        <span class="hljs-comment">#&#x5982;&#x679C;&#x9700;&#x8981;&#x8DDF;&#x8FDB;&#xFF0C;&#x90A3;&#x4E48;&#x4F7F;&#x7528;&#x5B9A;&#x4E49;&#x7684;Rule&#x89C4;&#x5219;&#x63D0;&#x53D6;&#x5E76;&#x8FD4;&#x56DE;&#x8FD9;&#x4E9B;Request&#x5BF9;&#x8C61;</span>
        <span class="hljs-keyword">if</span> follow <span class="hljs-keyword">and</span> self._follow_links:
            <span class="hljs-comment">#&#x8FD4;&#x56DE;&#x6BCF;&#x4E2A;Request&#x5BF9;&#x8C61;</span>
            <span class="hljs-keyword">for</span> request_or_item <span class="hljs-keyword">in</span> self._requests_to_follow(response):
                <span class="hljs-keyword">yield</span> request_or_item

    <span class="hljs-function"><span class="hljs-keyword">def</span> <span class="hljs-title">_compile_rules</span><span class="hljs-params">(self)</span>:</span>
        <span class="hljs-function"><span class="hljs-keyword">def</span> <span class="hljs-title">get_method</span><span class="hljs-params">(method)</span>:</span>
            <span class="hljs-keyword">if</span> callable(method):
                <span class="hljs-keyword">return</span> method
            <span class="hljs-keyword">elif</span> isinstance(method, basestring):
                <span class="hljs-keyword">return</span> getattr(self, method, <span class="hljs-keyword">None</span>)

        self._rules = [copy.copy(r) <span class="hljs-keyword">for</span> r <span class="hljs-keyword">in</span> self.rules]
        <span class="hljs-keyword">for</span> rule <span class="hljs-keyword">in</span> self._rules:
            rule.callback = get_method(rule.callback)
            rule.process_links = get_method(rule.process_links)
            rule.process_request = get_method(rule.process_request)

    <span class="hljs-function"><span class="hljs-keyword">def</span> <span class="hljs-title">set_crawler</span><span class="hljs-params">(self, crawler)</span>:</span>
        super(CrawlSpider, self).set_crawler(crawler)
        self._follow_links = crawler.settings.getbool(<span class="hljs-string">&apos;CRAWLSPIDER_FOLLOW_LINKS&apos;</span>, <span class="hljs-keyword">True</span>)
</code></pre>
<p>CrawlSpider&#x7EE7;&#x627F;&#x4E8E;Spider&#x7C7B;&#xFF0C;&#x9664;&#x4E86;&#x7EE7;&#x627F;&#x8FC7;&#x6765;&#x7684;&#x5C5E;&#x6027;&#x5916;&#xFF08;name&#x3001;allow_domains&#xFF09;&#xFF0C;&#x8FD8;&#x63D0;&#x4F9B;&#x4E86;&#x65B0;&#x7684;&#x5C5E;&#x6027;&#x548C;&#x65B9;&#x6CD5;: </p>
<h2 id="linkextractors">LinkExtractors</h2>
<pre><code class="lang-python">class scrapy.linkextractors.LinkExtractor
</code></pre>
<p>Link Extractors &#x7684;&#x76EE;&#x7684;&#x5F88;&#x7B80;&#x5355;: &#x63D0;&#x53D6;&#x94FE;&#x63A5;&#xFF61;</p>
<p>&#x6BCF;&#x4E2A;LinkExtractor&#x6709;&#x552F;&#x4E00;&#x7684;&#x516C;&#x5171;&#x65B9;&#x6CD5;&#x662F; extract_links()&#xFF0C;&#x5B83;&#x63A5;&#x6536;&#x4E00;&#x4E2A; Response &#x5BF9;&#x8C61;&#xFF0C;&#x5E76;&#x8FD4;&#x56DE;&#x4E00;&#x4E2A; scrapy.link.Link &#x5BF9;&#x8C61;&#x7684;&#x5217;&#x8868;&#x3002;</p>
<p>Link Extractors&#x8981;&#x5B9E;&#x4F8B;&#x5316;&#x4E00;&#x6B21;&#xFF0C;&#x5E76;&#x4E14; extract_links &#x65B9;&#x6CD5;&#x4F1A;&#x6839;&#x636E;&#x4E0D;&#x540C;&#x7684; response &#x8C03;&#x7528;&#x591A;&#x6B21;&#x63D0;&#x53D6;&#x94FE;&#x63A5;&#xFF61;</p>
<pre><code class="lang-python">class scrapy.linkextractors.LinkExtractor(
    allow = (),
    deny = (),
    allow_domains = (),
    deny_domains = (),
    deny_extensions = None,
    restrict_xpaths = (),
    tags = (&apos;a&apos;,&apos;area&apos;),
    attrs = (&apos;href&apos;,),
    canonicalize = True,
    unique = True,
    process_value = None
)
</code></pre>
<p>&#x4E3B;&#x8981;&#x53C2;&#x6570;&#xFF1A;</p>
<ul>
<li><p><code>allow</code>&#xFF1A;&#x6EE1;&#x8DB3;&#x62EC;&#x53F7;&#x4E2D;&#x201C;&#x6B63;&#x5219;&#x8868;&#x8FBE;&#x5F0F;&#x201D;&#x7684;&#x503C;&#x4F1A;&#x88AB;&#x63D0;&#x53D6;&#xFF0C;&#x5982;&#x679C;&#x4E3A;&#x7A7A;&#xFF0C;&#x5219;&#x5168;&#x90E8;&#x5339;&#x914D;&#x3002;</p>
</li>
<li><p><code>deny</code>&#xFF1A;&#x4E0E;&#x8FD9;&#x4E2A;&#x6B63;&#x5219;&#x8868;&#x8FBE;&#x5F0F;(&#x6216;&#x6B63;&#x5219;&#x8868;&#x8FBE;&#x5F0F;&#x5217;&#x8868;)&#x4E0D;&#x5339;&#x914D;&#x7684;URL&#x4E00;&#x5B9A;&#x4E0D;&#x63D0;&#x53D6;&#x3002;</p>
</li>
<li><p><code>allow_domains</code>&#xFF1A;&#x4F1A;&#x88AB;&#x63D0;&#x53D6;&#x7684;&#x94FE;&#x63A5;&#x7684;domains&#x3002;</p>
</li>
<li><p><code>deny_domains</code>&#xFF1A;&#x4E00;&#x5B9A;&#x4E0D;&#x4F1A;&#x88AB;&#x63D0;&#x53D6;&#x94FE;&#x63A5;&#x7684;domains&#x3002;</p>
</li>
<li><p><code>restrict_xpaths</code>&#xFF1A;&#x4F7F;&#x7528;xpath&#x8868;&#x8FBE;&#x5F0F;&#xFF0C;&#x548C;allow&#x5171;&#x540C;&#x4F5C;&#x7528;&#x8FC7;&#x6EE4;&#x94FE;&#x63A5;&#x3002;</p>
</li>
</ul>
<h2 id="rules">rules</h2>
<p>&#x5728;rules&#x4E2D;&#x5305;&#x542B;&#x4E00;&#x4E2A;&#x6216;&#x591A;&#x4E2A;Rule&#x5BF9;&#x8C61;&#xFF0C;&#x6BCF;&#x4E2A;Rule&#x5BF9;&#x722C;&#x53D6;&#x7F51;&#x7AD9;&#x7684;&#x52A8;&#x4F5C;&#x5B9A;&#x4E49;&#x4E86;&#x7279;&#x5B9A;&#x64CD;&#x4F5C;&#x3002;&#x5982;&#x679C;&#x591A;&#x4E2A;rule&#x5339;&#x914D;&#x4E86;&#x76F8;&#x540C;&#x7684;&#x94FE;&#x63A5;&#xFF0C;&#x5219;&#x6839;&#x636E;&#x89C4;&#x5219;&#x5728;&#x672C;&#x96C6;&#x5408;&#x4E2D;&#x88AB;&#x5B9A;&#x4E49;&#x7684;&#x987A;&#x5E8F;&#xFF0C;&#x7B2C;&#x4E00;&#x4E2A;&#x4F1A;&#x88AB;&#x4F7F;&#x7528;&#x3002;</p>
<pre><code class="lang-python">
class scrapy.spiders.Rule(
        link_extractor, 
        callback = None, 
        cb_kwargs = None, 
        follow = None, 
        process_links = None, 
        process_request = None
)
</code></pre>
<ul>
<li><p><code>link_extractor</code>&#xFF1A;&#x662F;&#x4E00;&#x4E2A;Link Extractor&#x5BF9;&#x8C61;&#xFF0C;&#x7528;&#x4E8E;&#x5B9A;&#x4E49;&#x9700;&#x8981;&#x63D0;&#x53D6;&#x7684;&#x94FE;&#x63A5;&#x3002;</p>
</li>
<li><p><code>callback</code>&#xFF1A; &#x4ECE;link_extractor&#x4E2D;&#x6BCF;&#x83B7;&#x53D6;&#x5230;&#x94FE;&#x63A5;&#x65F6;&#xFF0C;&#x53C2;&#x6570;&#x6240;&#x6307;&#x5B9A;&#x7684;&#x503C;&#x4F5C;&#x4E3A;&#x56DE;&#x8C03;&#x51FD;&#x6570;&#xFF0C;&#x8BE5;&#x56DE;&#x8C03;&#x51FD;&#x6570;&#x63A5;&#x53D7;&#x4E00;&#x4E2A;response&#x4F5C;&#x4E3A;&#x5176;&#x7B2C;&#x4E00;&#x4E2A;&#x53C2;&#x6570;&#x3002;</p>
<blockquote>
<p>&#x6CE8;&#x610F;&#xFF1A;&#x5F53;&#x7F16;&#x5199;&#x722C;&#x866B;&#x89C4;&#x5219;&#x65F6;&#xFF0C;&#x907F;&#x514D;&#x4F7F;&#x7528;parse&#x4F5C;&#x4E3A;&#x56DE;&#x8C03;&#x51FD;&#x6570;&#x3002;&#x7531;&#x4E8E;CrawlSpider&#x4F7F;&#x7528;parse&#x65B9;&#x6CD5;&#x6765;&#x5B9E;&#x73B0;&#x5176;&#x903B;&#x8F91;&#xFF0C;&#x5982;&#x679C;&#x8986;&#x76D6;&#x4E86; parse&#x65B9;&#x6CD5;&#xFF0C;crawl spider&#x5C06;&#x4F1A;&#x8FD0;&#x884C;&#x5931;&#x8D25;&#x3002;</p>
</blockquote>
</li>
<li><p><code>follow</code>&#xFF1A;&#x662F;&#x4E00;&#x4E2A;&#x5E03;&#x5C14;(boolean)&#x503C;&#xFF0C;&#x6307;&#x5B9A;&#x4E86;&#x6839;&#x636E;&#x8BE5;&#x89C4;&#x5219;&#x4ECE;response&#x63D0;&#x53D6;&#x7684;&#x94FE;&#x63A5;&#x662F;&#x5426;&#x9700;&#x8981;&#x8DDF;&#x8FDB;&#x3002; &#x5982;&#x679C;callback&#x4E3A;None&#xFF0C;follow &#x9ED8;&#x8BA4;&#x8BBE;&#x7F6E;&#x4E3A;True &#xFF0C;&#x5426;&#x5219;&#x9ED8;&#x8BA4;&#x4E3A;False&#x3002;</p>
</li>
<li><p><code>process_links</code>&#xFF1A;&#x6307;&#x5B9A;&#x8BE5;spider&#x4E2D;&#x54EA;&#x4E2A;&#x7684;&#x51FD;&#x6570;&#x5C06;&#x4F1A;&#x88AB;&#x8C03;&#x7528;&#xFF0C;&#x4ECE;link_extractor&#x4E2D;&#x83B7;&#x53D6;&#x5230;&#x94FE;&#x63A5;&#x5217;&#x8868;&#x65F6;&#x5C06;&#x4F1A;&#x8C03;&#x7528;&#x8BE5;&#x51FD;&#x6570;&#x3002;&#x8BE5;&#x65B9;&#x6CD5;&#x4E3B;&#x8981;&#x7528;&#x6765;&#x8FC7;&#x6EE4;&#x3002;</p>
</li>
<li><p><code>process_request</code>&#xFF1A;&#x6307;&#x5B9A;&#x8BE5;spider&#x4E2D;&#x54EA;&#x4E2A;&#x7684;&#x51FD;&#x6570;&#x5C06;&#x4F1A;&#x88AB;&#x8C03;&#x7528;&#xFF0C; &#x8BE5;&#x89C4;&#x5219;&#x63D0;&#x53D6;&#x5230;&#x6BCF;&#x4E2A;request&#x65F6;&#x90FD;&#x4F1A;&#x8C03;&#x7528;&#x8BE5;&#x51FD;&#x6570;&#x3002; (&#x7528;&#x6765;&#x8FC7;&#x6EE4;request)</p>
</li>
</ul>
<h2 id="&#x722C;&#x53D6;&#x89C4;&#x5219;crawling-rules">&#x722C;&#x53D6;&#x89C4;&#x5219;(Crawling rules)</h2>
<p>&#x7EE7;&#x7EED;&#x7528;&#x817E;&#x8BAF;&#x62DB;&#x8058;&#x4E3A;&#x4F8B;&#xFF0C;&#x7ED9;&#x51FA;&#x914D;&#x5408;rule&#x4F7F;&#x7528;CrawlSpider&#x7684;&#x4F8B;&#x5B50;:</p>
<ol>
<li><p>&#x9996;&#x5148;&#x8FD0;&#x884C;</p>
<pre><code class="lang-sh"> scrapy shell <span class="hljs-string">&quot;http://hr.tencent.com/position.php?&amp;start=0#a&quot;</span>
</code></pre>
</li>
<li><p>&#x5BFC;&#x5165;LinkExtractor&#xFF0C;&#x521B;&#x5EFA;LinkExtractor&#x5B9E;&#x4F8B;&#x5BF9;&#x8C61;&#x3002;&#xFF1A;</p>
<pre><code class="lang-python"> <span class="hljs-keyword">from</span> scrapy.linkextractors <span class="hljs-keyword">import</span> LinkExtractor

 page_lx = LinkExtractor(allow=(<span class="hljs-string">&apos;position.php?&amp;start=\d+&apos;</span>))
</code></pre>
<blockquote>
<p>allow : LinkExtractor&#x5BF9;&#x8C61;&#x6700;&#x91CD;&#x8981;&#x7684;&#x53C2;&#x6570;&#x4E4B;&#x4E00;&#xFF0C;&#x8FD9;&#x662F;&#x4E00;&#x4E2A;&#x6B63;&#x5219;&#x8868;&#x8FBE;&#x5F0F;&#xFF0C;&#x5FC5;&#x987B;&#x8981;&#x5339;&#x914D;&#x8FD9;&#x4E2A;&#x6B63;&#x5219;&#x8868;&#x8FBE;&#x5F0F;(&#x6216;&#x6B63;&#x5219;&#x8868;&#x8FBE;&#x5F0F;&#x5217;&#x8868;)&#x7684;URL&#x624D;&#x4F1A;&#x88AB;&#x63D0;&#x53D6;&#xFF0C;&#x5982;&#x679C;&#x6CA1;&#x6709;&#x7ED9;&#x51FA;(&#x6216;&#x4E3A;&#x7A7A;), &#x5B83;&#x4F1A;&#x5339;&#x914D;&#x6240;&#x6709;&#x7684;&#x94FE;&#x63A5;&#xFF61;</p>
<p>deny : &#x7528;&#x6CD5;&#x540C;allow&#xFF0C;&#x53EA;&#x4E0D;&#x8FC7;&#x4E0E;&#x8FD9;&#x4E2A;&#x6B63;&#x5219;&#x8868;&#x8FBE;&#x5F0F;&#x5339;&#x914D;&#x7684;URL&#x4E0D;&#x4F1A;&#x88AB;&#x63D0;&#x53D6;)&#xFF61;&#x5B83;&#x7684;&#x4F18;&#x5148;&#x7EA7;&#x9AD8;&#x4E8E; allow &#x7684;&#x53C2;&#x6570;&#xFF0C;&#x5982;&#x679C;&#x6CA1;&#x6709;&#x7ED9;&#x51FA;(&#x6216;None), &#x5C06;&#x4E0D;&#x6392;&#x9664;&#x4EFB;&#x4F55;&#x94FE;&#x63A5;&#xFF61;</p>
</blockquote>
</li>
<li><p>&#x8C03;&#x7528;LinkExtractor&#x5B9E;&#x4F8B;&#x7684;extract_links()&#x65B9;&#x6CD5;&#x67E5;&#x8BE2;&#x5339;&#x914D;&#x7ED3;&#x679C;&#xFF1A;</p>
<pre><code class="lang-python"> page_lx.extract_links(response)
</code></pre>
</li>
<li><p>&#x6CA1;&#x6709;&#x67E5;&#x5230;&#xFF1A;</p>
<pre><code class="lang-python"> []
</code></pre>
</li>
<li><p>&#x6CE8;&#x610F;&#x8F6C;&#x4E49;&#x5B57;&#x7B26;&#x7684;&#x95EE;&#x9898;&#xFF0C;&#x7EE7;&#x7EED;&#x91CD;&#x65B0;&#x5339;&#x914D;&#xFF1A;</p>
<pre><code class="lang-python"> page_lx = LinkExtractor(allow=(<span class="hljs-string">&apos;position\.php\?&amp;start=\d+&apos;</span>))
 <span class="hljs-comment"># page_lx = LinkExtractor(allow = (&apos;start=\d+&apos;))</span>

 page_lx.extract_links(response)
</code></pre>
</li>
</ol>
<p><img src="../images/tencent_rule.png" alt=""></p>
<h2 id="crawlspider-&#x7248;&#x672C;">CrawlSpider &#x7248;&#x672C;</h2>
<p>&#x90A3;&#x4E48;&#xFF0C;scrapy shell&#x6D4B;&#x8BD5;&#x5B8C;&#x6210;&#x4E4B;&#x540E;&#xFF0C;&#x4FEE;&#x6539;&#x4EE5;&#x4E0B;&#x4EE3;&#x7801;</p>
<pre><code class="lang-python"><span class="hljs-comment">#&#x63D0;&#x53D6;&#x5339;&#x914D; &apos;http://hr.tencent.com/position.php?&amp;start=\d+&apos;&#x7684;&#x94FE;&#x63A5;</span>
page_lx = LinkExtractor(allow = (<span class="hljs-string">&apos;start=\d+&apos;</span>))

rules = [
    <span class="hljs-comment">#&#x63D0;&#x53D6;&#x5339;&#x914D;,&#x5E76;&#x4F7F;&#x7528;spider&#x7684;parse&#x65B9;&#x6CD5;&#x8FDB;&#x884C;&#x5206;&#x6790;;&#x5E76;&#x8DDF;&#x8FDB;&#x94FE;&#x63A5;(&#x6CA1;&#x6709;callback&#x610F;&#x5473;&#x7740;follow&#x9ED8;&#x8BA4;&#x4E3A;True)</span>
    Rule(page_lx, callback = <span class="hljs-string">&apos;parse&apos;</span>, follow = <span class="hljs-keyword">True</span>)
]
</code></pre>
<p><strong>&#x8FD9;&#x4E48;&#x5199;&#x5BF9;&#x5417;&#xFF1F;</strong></p>
<p><strong>&#x4E0D;&#x5BF9;&#xFF01;&#x5343;&#x4E07;&#x8BB0;&#x4F4F; callback &#x5343;&#x4E07;&#x4E0D;&#x80FD;&#x5199; parse&#xFF0C;&#x518D;&#x6B21;&#x5F3A;&#x8C03;&#xFF1A;&#x7531;&#x4E8E;CrawlSpider&#x4F7F;&#x7528;parse&#x65B9;&#x6CD5;&#x6765;&#x5B9E;&#x73B0;&#x5176;&#x903B;&#x8F91;&#xFF0C;&#x5982;&#x679C;&#x8986;&#x76D6;&#x4E86; parse&#x65B9;&#x6CD5;&#xFF0C;crawl spider&#x5C06;&#x4F1A;&#x8FD0;&#x884C;&#x5931;&#x8D25;&#x3002;</strong></p>
<pre><code class="lang-python"><span class="hljs-comment">#tencent.py</span>

<span class="hljs-keyword">import</span> scrapy
<span class="hljs-keyword">from</span> scrapy.spiders <span class="hljs-keyword">import</span> CrawlSpider, Rule
<span class="hljs-keyword">from</span> scrapy.linkextractors <span class="hljs-keyword">import</span> LinkExtractor
<span class="hljs-keyword">from</span> mySpider.items <span class="hljs-keyword">import</span> TencentItem

<span class="hljs-class"><span class="hljs-keyword">class</span> <span class="hljs-title">TencentSpider</span><span class="hljs-params">(CrawlSpider)</span>:</span>
    name = <span class="hljs-string">&quot;tencent&quot;</span>
    allowed_domains = [<span class="hljs-string">&quot;hr.tencent.com&quot;</span>]
    start_urls = [
        <span class="hljs-string">&quot;http://hr.tencent.com/position.php?&amp;start=0#a&quot;</span>
    ]

    page_lx = LinkExtractor(allow=(<span class="hljs-string">&quot;start=\d+&quot;</span>))

    rules = [
        Rule(page_lx, callback = <span class="hljs-string">&quot;parseContent&quot;</span>, follow = <span class="hljs-keyword">True</span>)
    ]

    <span class="hljs-function"><span class="hljs-keyword">def</span> <span class="hljs-title">parseContent</span><span class="hljs-params">(self, response)</span>:</span>
        <span class="hljs-keyword">for</span> each <span class="hljs-keyword">in</span> response.xpath(<span class="hljs-string">&apos;//*[@class=&quot;even&quot;]&apos;</span>):
            name = each.xpath(<span class="hljs-string">&apos;./td[1]/a/text()&apos;</span>).extract()[<span class="hljs-number">0</span>]
            detailLink = each.xpath(<span class="hljs-string">&apos;./td[1]/a/@href&apos;</span>).extract()[<span class="hljs-number">0</span>]
            positionInfo = each.xpath(<span class="hljs-string">&apos;./td[2]/text()&apos;</span>).extract()[<span class="hljs-number">0</span>]

            peopleNumber = each.xpath(<span class="hljs-string">&apos;./td[3]/text()&apos;</span>).extract()[<span class="hljs-number">0</span>]
            workLocation = each.xpath(<span class="hljs-string">&apos;./td[4]/text()&apos;</span>).extract()[<span class="hljs-number">0</span>]
            publishTime = each.xpath(<span class="hljs-string">&apos;./td[5]/text()&apos;</span>).extract()[<span class="hljs-number">0</span>]
            <span class="hljs-comment">#print name, detailLink, catalog,recruitNumber,workLocation,publishTime</span>

            item = TencentItem()
            item[<span class="hljs-string">&apos;name&apos;</span>]=name.encode(<span class="hljs-string">&apos;utf-8&apos;</span>)
            item[<span class="hljs-string">&apos;detailLink&apos;</span>]=detailLink.encode(<span class="hljs-string">&apos;utf-8&apos;</span>)
            item[<span class="hljs-string">&apos;positionInfo&apos;</span>]=positionInfo.encode(<span class="hljs-string">&apos;utf-8&apos;</span>)
            item[<span class="hljs-string">&apos;peopleNumber&apos;</span>]=peopleNumber.encode(<span class="hljs-string">&apos;utf-8&apos;</span>)
            item[<span class="hljs-string">&apos;workLocation&apos;</span>]=workLocation.encode(<span class="hljs-string">&apos;utf-8&apos;</span>)
            item[<span class="hljs-string">&apos;publishTime&apos;</span>]=publishTime.encode(<span class="hljs-string">&apos;utf-8&apos;</span>)

            <span class="hljs-keyword">yield</span> item

    <span class="hljs-comment"># parse() &#x65B9;&#x6CD5;&#x4E0D;&#x9700;&#x8981;&#x5199;     </span>
    <span class="hljs-comment"># def parse(self, response):                                              </span>
    <span class="hljs-comment">#     pass</span>
</code></pre>
<p>&#x8FD0;&#x884C;&#xFF1A;
<code>scrapy crawl tencent</code></p>
<h2 id="logging">Logging</h2>
<p>Scrapy&#x63D0;&#x4F9B;&#x4E86;log&#x529F;&#x80FD;&#xFF0C;&#x53EF;&#x4EE5;&#x901A;&#x8FC7; logging &#x6A21;&#x5757;&#x4F7F;&#x7528;&#x3002;</p>
<blockquote>
<p>&#x53EF;&#x4EE5;&#x4FEE;&#x6539;&#x914D;&#x7F6E;&#x6587;&#x4EF6;settings.py&#xFF0C;&#x4EFB;&#x610F;&#x4F4D;&#x7F6E;&#x6DFB;&#x52A0;&#x4E0B;&#x9762;&#x4E24;&#x884C;&#xFF0C;&#x6548;&#x679C;&#x4F1A;&#x6E05;&#x723D;&#x5F88;&#x591A;&#x3002;</p>
</blockquote>
<pre><code>LOG_FILE = &quot;TencentSpider.log&quot;
LOG_LEVEL = &quot;INFO&quot;
</code></pre><h4 id="log-levels">Log levels</h4>
<ul>
<li><p>Scrapy&#x63D0;&#x4F9B;5&#x5C42;logging&#x7EA7;&#x522B;:</p>
</li>
<li><p>CRITICAL - &#x4E25;&#x91CD;&#x9519;&#x8BEF;(critical)</p>
</li>
<li>ERROR - &#x4E00;&#x822C;&#x9519;&#x8BEF;(regular errors)</li>
<li>WARNING - &#x8B66;&#x544A;&#x4FE1;&#x606F;(warning messages)</li>
<li>INFO - &#x4E00;&#x822C;&#x4FE1;&#x606F;(informational messages)</li>
<li>DEBUG - &#x8C03;&#x8BD5;&#x4FE1;&#x606F;(debugging messages)</li>
</ul>
<h4 id="logging&#x8BBE;&#x7F6E;">logging&#x8BBE;&#x7F6E;</h4>
<p>&#x901A;&#x8FC7;&#x5728;setting.py&#x4E2D;&#x8FDB;&#x884C;&#x4EE5;&#x4E0B;&#x8BBE;&#x7F6E;&#x53EF;&#x4EE5;&#x88AB;&#x7528;&#x6765;&#x914D;&#x7F6E;logging:</p>
<ol>
<li><code>LOG_ENABLED</code> &#x9ED8;&#x8BA4;: True&#xFF0C;&#x542F;&#x7528;logging </li>
<li><code>LOG_ENCODING</code> &#x9ED8;&#x8BA4;: &apos;utf-8&apos;&#xFF0C;logging&#x4F7F;&#x7528;&#x7684;&#x7F16;&#x7801;</li>
<li><code>LOG_FILE</code> &#x9ED8;&#x8BA4;: None&#xFF0C;&#x5728;&#x5F53;&#x524D;&#x76EE;&#x5F55;&#x91CC;&#x521B;&#x5EFA;logging&#x8F93;&#x51FA;&#x6587;&#x4EF6;&#x7684;&#x6587;&#x4EF6;&#x540D;</li>
<li><code>LOG_LEVEL</code> &#x9ED8;&#x8BA4;: &apos;DEBUG&apos;&#xFF0C;log&#x7684;&#x6700;&#x4F4E;&#x7EA7;&#x522B; </li>
<li><code>LOG_STDOUT</code> &#x9ED8;&#x8BA4;: False &#x5982;&#x679C;&#x4E3A; True&#xFF0C;&#x8FDB;&#x7A0B;&#x6240;&#x6709;&#x7684;&#x6807;&#x51C6;&#x8F93;&#x51FA;(&#x53CA;&#x9519;&#x8BEF;)&#x5C06;&#x4F1A;&#x88AB;&#x91CD;&#x5B9A;&#x5411;&#x5230;log&#x4E2D;&#x3002;&#x4F8B;&#x5982;&#xFF0C;&#x6267;&#x884C; print &quot;hello&quot; &#xFF0C;&#x5176;&#x5C06;&#x4F1A;&#x5728;Scrapy log&#x4E2D;&#x663E;&#x793A;&#x3002;</li>
</ol>
<footer class="page-footer"><span class="copyright">Copyright &#xA9; BigCat all right reserved&#xFF0C;powered by Gitbook</span><span class="footer-modification">&#x300C;Revision Time:
2017-02-05 22:48:24&#x300D;
</span></footer>
                    
                    </section>
                
                
                </div>
            </div>
        </div>

        
        <a href="../../file/part04/4.5.html" class="navigation navigation-prev " aria-label="Previous page: Spiders"><i class="fa fa-angle-left"></i></a>
        
        
        <a href="../../file/part04/4.7.html" class="navigation navigation-next " aria-label="Next page: Request/Response"><i class="fa fa-angle-right"></i></a>
        
    </div>
</div>

        
<script src="../../gitbook/app.js"></script>

    
    <script src="../../gitbook/plugins/gitbook-plugin-splitter/splitter.js"></script>
    

    
    <script src="../../gitbook/plugins/gitbook-plugin-toggle-chapters/toggle.js"></script>
    

    
    <script src="../../gitbook/plugins/gitbook-plugin-fontsettings/buttons.js"></script>
    

    
    <script src="../../gitbook/plugins/gitbook-plugin-livereload/plugin.js"></script>
    

<script>
require(["gitbook"], function(gitbook) {
    var config = {"disqus":{"shortName":"gitbookuse"},"github":{"url":"https://github.com/dododream"},"search-pro":{"cutWordLib":"nodejieba","defineWord":["gitbook-use"]},"sharing":{"weibo":true,"facebook":true,"twitter":true,"google":false,"instapaper":false,"vk":false,"all":["facebook","google","twitter","weibo","instapaper"]},"tbfed-pagefooter":{"copyright":"Copyright © BigCat","modify_label":"「Revision Time:","modify_format":"YYYY-MM-DD HH:mm:ss」"},"baidu":{"token":"ff100361cdce95dd4c8fb96b4009f7bc"},"sitemap":{"hostname":"http://www.treenewbee.top"},"donate":{"wechat":"http://weixin.png","alipay":"http://alipay.png","title":"","button":"赏","alipayText":"支付宝打赏","wechatText":"微信打赏"},"edit-link":{"base":"https://github.com/dododream/edit","label":"Edit This Page"},"splitter":{},"toggle-chapters":{},"highlight":{},"fontsettings":{"theme":"white","family":"sans","size":2},"livereload":{}};
    gitbook.start(config);
});
</script>

        <!-- body:end -->
    </body>
    <!-- End of book Python爬虫课程讲义 -->
</html>
