<!DOCTYPE HTML>
<html lang="zh-Hans">
    <!-- Start book Python爬虫课程讲义 -->
    <head>
        <!-- head:start -->
        <meta charset="UTF-8">
        <meta http-equiv="X-UA-Compatible" content="IE=edge">
        <title>尝试改写新浪网分类资讯爬虫2 | Python爬虫课程讲义</title>
        <meta content="text/html; charset=utf-8" http-equiv="Content-Type">
        <meta name="description" content="">
        <meta name="generator" content="GitBook 2.6.7">
        <meta name="author" content="BigCat">
        
        <meta name="HandheldFriendly" content="true">
        <meta name="viewport" content="width=device-width, initial-scale=1">
        <meta name="apple-mobile-web-app-capable" content="yes">
        <meta name="apple-mobile-web-app-status-bar-style" content="black">
        <link rel="apple-touch-icon-precomposed" sizes="152x152" href="../../gitbook/images/apple-touch-icon-precomposed-152.png">
        <link rel="shortcut icon" href="../../gitbook/images/favicon.ico" type="image/x-icon">
        
    <link rel="stylesheet" href="../../gitbook/style.css">
    
        
        <link rel="stylesheet" href="../../gitbook/plugins/gitbook-plugin-tbfed-pagefooter/footer.css">
        
    
        
        <link rel="stylesheet" href="../../gitbook/plugins/gitbook-plugin-splitter/splitter.css">
        
    
        
        <link rel="stylesheet" href="../../gitbook/plugins/gitbook-plugin-toggle-chapters/toggle.css">
        
    
        
        <link rel="stylesheet" href="../../gitbook/plugins/gitbook-plugin-highlight/website.css">
        
    
        
        <link rel="stylesheet" href="../../gitbook/plugins/gitbook-plugin-fontsettings/website.css">
        
    
    

        
    
    
    <link rel="next" href="../../file/part07/7.7.html" />
    
    
    <link rel="prev" href="../../file/part07/7.5.html" />
    

        <!-- head:end -->
    </head>
    <body>
        <!-- body:start -->
        
    <div class="book"
        data-level="7.6"
        data-chapter-title="尝试改写新浪网分类资讯爬虫2"
        data-filepath="file/part07/7.6.md"
        data-basepath="../.."
        data-revision="Thu Feb 09 2017 09:48:59 GMT+0800 (CST)"
        data-innerlanguage="">
    

<div class="book-summary">
    <nav role="navigation">
        <ul class="summary">
            
            
            
            

            

            
    
        <li class="chapter " data-level="0" data-path="index.html">
            
                
                    <a href="../../index.html">
                
                        <i class="fa fa-check"></i>
                        
                        传智播客Python学院爬虫课程
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="1" data-path="file/part01/1.html">
            
                
                    <a href="../../file/part01/1.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>1.</b>
                        
                        爬虫原理与数据抓取
                    </a>
            
            
            <ul class="articles">
                
    
        <li class="chapter " data-level="1.1" data-path="file/part01/1.1.html">
            
                
                    <a href="../../file/part01/1.1.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>1.1.</b>
                        
                        (了解)通用爬虫和聚焦爬虫
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="1.2" data-path="file/part01/1.2.html">
            
                
                    <a href="../../file/part01/1.2.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>1.2.</b>
                        
                        (复习)HTTP/HTTPS的请求与响应
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="1.3" data-path="file/part01/1.3.html">
            
                
                    <a href="../../file/part01/1.3.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>1.3.</b>
                        
                        HTTP/HTTPS抓包工具-Fiddler
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="1.4" data-path="file/part01/1.4.html">
            
                
                    <a href="../../file/part01/1.4.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>1.4.</b>
                        
                        urllib2模块的基本使用
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="1.5" data-path="file/part01/1.5.html">
            
                
                    <a href="../../file/part01/1.5.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>1.5.</b>
                        
                        urllib2：GET请求和POST请求
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="1.6" data-path="file/part01/1.6.html">
            
                
                    <a href="../../file/part01/1.6.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>1.6.</b>
                        
                        urllib2：Handler处理器和自定义Opener
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="1.7" data-path="file/part01/1.7.html">
            
                
                    <a href="../../file/part01/1.7.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>1.7.</b>
                        
                        urllib2：URLError与HTTPError
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="1.8" data-path="file/part01/1.8.html">
            
                
                    <a href="../../file/part01/1.8.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>1.8.</b>
                        
                        Requests模块
                    </a>
            
            
        </li>
    

            </ul>
            
        </li>
    
        <li class="chapter " data-level="2" data-path="file/part02/2.html">
            
                
                    <a href="../../file/part02/2.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>2.</b>
                        
                        非结构化数据与结构化数据提取
                    </a>
            
            
            <ul class="articles">
                
    
        <li class="chapter " data-level="2.1" data-path="file/part02/2.1.html">
            
                
                    <a href="../../file/part02/2.1.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>2.1.</b>
                        
                        正则表达式re模块
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="2.2" data-path="file/part02/2.2.html">
            
                
                    <a href="../../file/part02/2.2.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>2.2.</b>
                        
                        案例：使用正则表达式的爬虫
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="2.3" data-path="file/part02/2.3.html">
            
                
                    <a href="../../file/part02/2.3.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>2.3.</b>
                        
                        XPath与lxml类库
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="2.4" data-path="file/part02/2.4.html">
            
                
                    <a href="../../file/part02/2.4.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>2.4.</b>
                        
                        案例：使用XPath的爬虫
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="2.5" data-path="file/part02/2.5.html">
            
                
                    <a href="../../file/part02/2.5.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>2.5.</b>
                        
                        BeautifulSoup4 解析器
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="2.6" data-path="file/part02/2.6.html">
            
                
                    <a href="../../file/part02/2.6.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>2.6.</b>
                        
                        案例：使用bs4的爬虫
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="2.7" data-path="file/part02/2.7.html">
            
                
                    <a href="../../file/part02/2.7.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>2.7.</b>
                        
                        JSON模块与JsonPath
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="2.8" data-path="file/part02/2.8.html">
            
                
                    <a href="../../file/part02/2.8.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>2.8.</b>
                        
                        糗事百科案例
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="2.9" data-path="file/part02/2.9.html">
            
                
                    <a href="../../file/part02/2.9.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>2.9.</b>
                        
                        多线程爬虫案例
                    </a>
            
            
        </li>
    

            </ul>
            
        </li>
    
        <li class="chapter " data-level="3" data-path="file/part03/3.html">
            
                
                    <a href="../../file/part03/3.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>3.</b>
                        
                        动态HTML处理和机器图像识别
                    </a>
            
            
            <ul class="articles">
                
    
        <li class="chapter " data-level="3.1" data-path="file/part03/3.1.html">
            
                
                    <a href="../../file/part03/3.1.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>3.1.</b>
                        
                        动态HTML介绍
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="3.2" data-path="file/part03/3.2.html">
            
                
                    <a href="../../file/part03/3.2.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>3.2.</b>
                        
                        Selenium与PhantomJS
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="3.3" data-path="file/part03/3.3.html">
            
                
                    <a href="../../file/part03/3.3.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>3.3.</b>
                        
                        案例一：网站模拟登录
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="3.4" data-path="file/part03/3.4.html">
            
                
                    <a href="../../file/part03/3.4.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>3.4.</b>
                        
                        案例二：动态页面模拟点击
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="3.5" data-path="file/part03/3.5.html">
            
                
                    <a href="../../file/part03/3.5.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>3.5.</b>
                        
                        案例三：执行JavaScript语句
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="3.6" data-path="file/part03/3.6.html">
            
                
                    <a href="../../file/part03/3.6.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>3.6.</b>
                        
                        机器视觉与Tesseract介绍
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="3.7" data-path="file/part03/3.7.html">
            
                
                    <a href="../../file/part03/3.7.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>3.7.</b>
                        
                        处理一些格式规范的文字
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="3.8" data-path="file/part03/3.8.html">
            
                
                    <a href="../../file/part03/3.8.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>3.8.</b>
                        
                        案例：尝试对验证码进行机器识别处理
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="3.9" data-path="file/part03/3.9.html">
            
                
                    <a href="../../file/part03/3.9.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>3.9.</b>
                        
                        机器学习：训练Tesseract
                    </a>
            
            
        </li>
    

            </ul>
            
        </li>
    
        <li class="chapter " data-level="4" data-path="file/part04/4.html">
            
                
                    <a href="../../file/part04/4.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>4.</b>
                        
                        Scrapy框架
                    </a>
            
            
            <ul class="articles">
                
    
        <li class="chapter " data-level="4.1" data-path="file/part04/4.1.html">
            
                
                    <a href="../../file/part04/4.1.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>4.1.</b>
                        
                        配置安装
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="4.2" data-path="file/part04/4.2.html">
            
                
                    <a href="../../file/part04/4.2.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>4.2.</b>
                        
                        入门案例
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="4.3" data-path="file/part04/4.3.html">
            
                
                    <a href="../../file/part04/4.3.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>4.3.</b>
                        
                        Scrapy Shell
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="4.4" data-path="file/part04/4.4.html">
            
                
                    <a href="../../file/part04/4.4.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>4.4.</b>
                        
                        Item Pipeline
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="4.5" data-path="file/part04/4.5.html">
            
                
                    <a href="../../file/part04/4.5.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>4.5.</b>
                        
                        Spiders
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="4.6" data-path="file/part04/4.6.html">
            
                
                    <a href="../../file/part04/4.6.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>4.6.</b>
                        
                        CrawlSpiders
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="4.7" data-path="file/part04/4.7.html">
            
                
                    <a href="../../file/part04/4.7.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>4.7.</b>
                        
                        Request/Response
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="4.8" data-path="file/part04/4.8.html">
            
                
                    <a href="../../file/part04/4.8.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>4.8.</b>
                        
                        Downloader Middlewares
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="4.9" data-path="file/part04/4.9.html">
            
                
                    <a href="../../file/part04/4.9.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>4.9.</b>
                        
                        Settings
                    </a>
            
            
        </li>
    

            </ul>
            
        </li>
    
        <li class="chapter " data-level="5" data-path="file/part05/5.html">
            
                
                    <a href="../../file/part05/5.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>5.</b>
                        
                        Scrapy实战项目
                    </a>
            
            
            <ul class="articles">
                
    
        <li class="chapter " data-level="5.1" data-path="file/part05/5.1.html">
            
                
                    <a href="../../file/part05/5.1.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>5.1.</b>
                        
                        (案例一)手机App抓包爬虫
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="5.2" data-path="file/part05/5.2.html">
            
                
                    <a href="../../file/part05/5.2.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>5.2.</b>
                        
                        (案例二)阳光热线问政平台爬虫
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="5.3" data-path="file/part05/5.3.html">
            
                
                    <a href="../../file/part05/5.3.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>5.3.</b>
                        
                        (案例三)新浪网分类资讯爬虫
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="5.4" data-path="file/part05/5.4.html">
            
                
                    <a href="../../file/part05/5.4.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>5.4.</b>
                        
                        (案例四)图片下载器爬虫
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="5.5" data-path="file/part05/5.5.html">
            
                
                    <a href="../../file/part05/5.5.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>5.5.</b>
                        
                        (案例五)将数据保存在MongoDB中
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="5.6" data-path="file/part05/5.6.html">
            
                
                    <a href="../../file/part05/5.6.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>5.6.</b>
                        
                        (案例六)三种scrapy模拟登陆策略
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="5.7" data-path="file/part05/5.7.html">
            
                
                    <a href="../../file/part05/5.7.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>5.7.</b>
                        
                        附：通过Fiddler进行手机抓包方法
                    </a>
            
            
        </li>
    

            </ul>
            
        </li>
    
        <li class="chapter " data-level="6" data-path="file/part06/6.html">
            
                
                    <a href="../../file/part06/6.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>6.</b>
                        
                        scrapy-redis分布式组件
                    </a>
            
            
            <ul class="articles">
                
    
        <li class="chapter " data-level="6.1" data-path="file/part06/6.1.html">
            
                
                    <a href="../../file/part06/6.1.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>6.1.</b>
                        
                        源码分析参考：Connection
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="6.2" data-path="file/part06/6.2.html">
            
                
                    <a href="../../file/part06/6.2.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>6.2.</b>
                        
                        源码分析参考：Dupefilter
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="6.3" data-path="file/part06/6.3.html">
            
                
                    <a href="../../file/part06/6.3.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>6.3.</b>
                        
                        源码分析参考：Picklecompat
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="6.4" data-path="file/part06/6.4.html">
            
                
                    <a href="../../file/part06/6.4.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>6.4.</b>
                        
                        源码分析参考：Pipelines
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="6.5" data-path="file/part06/6.5.html">
            
                
                    <a href="../../file/part06/6.5.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>6.5.</b>
                        
                        源码分析参考：Queue
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="6.6" data-path="file/part06/6.6.html">
            
                
                    <a href="../../file/part06/6.6.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>6.6.</b>
                        
                        源码分析参考：Scheduler
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="6.7" data-path="file/part06/6.7.html">
            
                
                    <a href="../../file/part06/6.7.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>6.7.</b>
                        
                        源码分析参考：Spider
                    </a>
            
            
        </li>
    

            </ul>
            
        </li>
    
        <li class="chapter " data-level="7" data-path="file/part07/7.html">
            
                
                    <a href="../../file/part07/7.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>7.</b>
                        
                        scrapy-redis实战
                    </a>
            
            
            <ul class="articles">
                
    
        <li class="chapter " data-level="7.1" data-path="file/part07/7.1.html">
            
                
                    <a href="../../file/part07/7.1.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>7.1.</b>
                        
                        源码自带项目说明
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="7.2" data-path="file/part07/7.2.html">
            
                
                    <a href="../../file/part07/7.2.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>7.2.</b>
                        
                        有缘网分布式爬虫项目1
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="7.3" data-path="file/part07/7.3.html">
            
                
                    <a href="../../file/part07/7.3.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>7.3.</b>
                        
                        有缘网分布式爬虫项目2
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="7.4" data-path="file/part07/7.4.html">
            
                
                    <a href="../../file/part07/7.4.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>7.4.</b>
                        
                        处理Redis里的数据
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="7.5" data-path="file/part07/7.5.html">
            
                
                    <a href="../../file/part07/7.5.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>7.5.</b>
                        
                        尝试改写新浪网分类资讯爬虫1
                    </a>
            
            
        </li>
    
        <li class="chapter active" data-level="7.6" data-path="file/part07/7.6.html">
            
                
                    <a href="../../file/part07/7.6.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>7.6.</b>
                        
                        尝试改写新浪网分类资讯爬虫2
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="7.7" data-path="file/part07/7.7.html">
            
                
                    <a href="../../file/part07/7.7.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>7.7.</b>
                        
                        IT桔子分布式项目1
                    </a>
            
            
        </li>
    
        <li class="chapter " data-level="7.8" data-path="file/part07/7.8.html">
            
                
                    <a href="../../file/part07/7.8.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>7.8.</b>
                        
                        IT桔子分布式项目2
                    </a>
            
            
        </li>
    

            </ul>
            
        </li>
    
        <li class="chapter " data-level="8" data-path="file/duanzi/duanzi.html">
            
                
                    <a href="../../file/duanzi/duanzi.html">
                
                        <i class="fa fa-check"></i>
                        
                            <b>8.</b>
                        
                        课余段子
                    </a>
            
            
        </li>
    


            
            <li class="divider"></li>
            <li>
                <a href="https://www.gitbook.com" target="_blank" rel="noopener noreferrer" class="gitbook-link">
                    Published with GitBook
                </a>
            </li>
            
        </ul>
    </nav>
</div>

    <div class="book-body">
        <div class="body-inner">
            <div class="book-header" role="navigation">
    <!-- Actions Left -->
    

    <!-- Title -->
    <h1>
        <i class="fa fa-circle-o-notch fa-spin"></i>
        <a href="../../" >Python爬虫课程讲义</a>
    </h1>
</div>

            <div class="page-wrapper" tabindex="-1" role="main">
                <div class="page-inner">
                
                
                    <section class="normal" id="section-">
                    
                        <h3 id="&#x5C06;&#x5DF2;&#x6709;&#x7684;&#x65B0;&#x6D6A;&#x7F51;&#x5206;&#x7C7B;&#x8D44;&#x8BAF;scrapy&#x722C;&#x866B;&#x9879;&#x76EE;&#xFF0C;&#x4FEE;&#x6539;&#x4E3A;&#x57FA;&#x4E8E;redisspider&#x7C7B;&#x7684;scrapyredis&#x5206;&#x5E03;&#x5F0F;&#x722C;&#x866B;&#x9879;&#x76EE;">&#x5C06;&#x5DF2;&#x6709;&#x7684;&#x65B0;&#x6D6A;&#x7F51;&#x5206;&#x7C7B;&#x8D44;&#x8BAF;Scrapy&#x722C;&#x866B;&#x9879;&#x76EE;&#xFF0C;&#x4FEE;&#x6539;&#x4E3A;&#x57FA;&#x4E8E;RedisSpider&#x7C7B;&#x7684;scrapy-redis&#x5206;&#x5E03;&#x5F0F;&#x722C;&#x866B;&#x9879;&#x76EE;</h3>
<p>&#x6CE8;&#xFF1A;items&#x6570;&#x636E;&#x76F4;&#x63A5;&#x5B58;&#x50A8;&#x5728;Redis&#x6570;&#x636E;&#x5E93;&#x4E2D;&#xFF0C;&#x8FD9;&#x4E2A;&#x529F;&#x80FD;&#x5DF2;&#x7ECF;&#x7531;scrapy-redis&#x81EA;&#x884C;&#x5B9E;&#x73B0;&#x3002;&#x9664;&#x975E;&#x5355;&#x72EC;&#x505A;&#x989D;&#x5916;&#x5904;&#x7406;(&#x6BD4;&#x5982;&#x76F4;&#x63A5;&#x5B58;&#x5165;&#x672C;&#x5730;&#x6570;&#x636E;&#x5E93;&#x7B49;)&#xFF0C;&#x5426;&#x5219;&#x4E0D;&#x7528;&#x7F16;&#x5199;pipelines.py&#x4EE3;&#x7801;&#x3002;</p>
<h2 id="itemspy&#x6587;&#x4EF6;">items.py&#x6587;&#x4EF6;</h2>
<pre><code class="lang-python"><span class="hljs-comment"># items.py</span>

<span class="hljs-comment"># -*- coding: utf-8 -*-</span>

<span class="hljs-keyword">import</span> scrapy

<span class="hljs-keyword">import</span> sys
reload(sys)
sys.setdefaultencoding(<span class="hljs-string">&quot;utf-8&quot;</span>)

<span class="hljs-class"><span class="hljs-keyword">class</span> <span class="hljs-title">SinaItem</span><span class="hljs-params">(scrapy.Item)</span>:</span>
    <span class="hljs-comment"># &#x5927;&#x7C7B;&#x7684;&#x6807;&#x9898; &#x548C; url</span>
    parentTitle = scrapy.Field()
    parentUrls = scrapy.Field()

    <span class="hljs-comment"># &#x5C0F;&#x7C7B;&#x7684;&#x6807;&#x9898; &#x548C; &#x5B50;url</span>
    subTitle = scrapy.Field()
    subUrls = scrapy.Field()

    <span class="hljs-comment"># &#x5C0F;&#x7C7B;&#x76EE;&#x5F55;&#x5B58;&#x50A8;&#x8DEF;&#x5F84;</span>
    <span class="hljs-comment"># subFilename = scrapy.Field()</span>

    <span class="hljs-comment"># &#x5C0F;&#x7C7B;&#x4E0B;&#x7684;&#x5B50;&#x94FE;&#x63A5;</span>
    sonUrls = scrapy.Field()

    <span class="hljs-comment"># &#x6587;&#x7AE0;&#x6807;&#x9898;&#x548C;&#x5185;&#x5BB9;</span>
    head = scrapy.Field()
    content = scrapy.Field()
</code></pre>
<h2 id="settingspy&#x6587;&#x4EF6;">settings.py&#x6587;&#x4EF6;</h2>
<pre><code class="lang-python"><span class="hljs-comment"># settings.py</span>

SPIDER_MODULES = [<span class="hljs-string">&apos;Sina.spiders&apos;</span>]
NEWSPIDER_MODULE = <span class="hljs-string">&apos;Sina.spiders&apos;</span>

USER_AGENT = <span class="hljs-string">&apos;scrapy-redis (+https://github.com/rolando/scrapy-redis)&apos;</span>

DUPEFILTER_CLASS = <span class="hljs-string">&quot;scrapy_redis.dupefilter.RFPDupeFilter&quot;</span>
SCHEDULER = <span class="hljs-string">&quot;scrapy_redis.scheduler.Scheduler&quot;</span>
SCHEDULER_PERSIST = <span class="hljs-keyword">True</span>
SCHEDULER_QUEUE_CLASS = <span class="hljs-string">&quot;scrapy_redis.queue.SpiderPriorityQueue&quot;</span>
<span class="hljs-comment">#SCHEDULER_QUEUE_CLASS = &quot;scrapy_redis.queue.SpiderQueue&quot;</span>
<span class="hljs-comment">#SCHEDULER_QUEUE_CLASS = &quot;scrapy_redis.queue.SpiderStack&quot;</span>

ITEM_PIPELINES = {
<span class="hljs-comment">#    &apos;Sina.pipelines.SinaPipeline&apos;: 300,</span>
    <span class="hljs-string">&apos;scrapy_redis.pipelines.RedisPipeline&apos;</span>: <span class="hljs-number">400</span>,
}

LOG_LEVEL = <span class="hljs-string">&apos;DEBUG&apos;</span>

<span class="hljs-comment"># Introduce an artificial delay to make use of parallelism and to speed up</span>
<span class="hljs-comment"># the crawl.</span>
DOWNLOAD_DELAY = <span class="hljs-number">1</span>

REDIS_HOST = <span class="hljs-string">&quot;192.168.13.26&quot;</span>
REDIS_PORT = <span class="hljs-number">6379</span>
</code></pre>
<h2 id="spiderssinapy">spiders/sina.py</h2>
<pre><code class="lang-python"><span class="hljs-comment"># sina.py</span>

<span class="hljs-comment"># -*- coding: utf-8 -*-</span>

<span class="hljs-keyword">from</span> Sina.items <span class="hljs-keyword">import</span> SinaItem
<span class="hljs-keyword">from</span> scrapy_redis.spiders <span class="hljs-keyword">import</span> RedisSpider
<span class="hljs-comment">#from scrapy.spiders import Spider</span>
<span class="hljs-keyword">import</span> scrapy

<span class="hljs-keyword">import</span> sys
reload(sys)
sys.setdefaultencoding(<span class="hljs-string">&quot;utf-8&quot;</span>)

<span class="hljs-comment">#class SinaSpider(Spider):</span>
<span class="hljs-class"><span class="hljs-keyword">class</span> <span class="hljs-title">SinaSpider</span><span class="hljs-params">(RedisSpider)</span>:</span>
    name= <span class="hljs-string">&quot;sina&quot;</span>
    redis_key = <span class="hljs-string">&quot;sinaspider:start_urls&quot;</span>
    <span class="hljs-comment">#allowed_domains= [&quot;sina.com.cn&quot;]</span>
    <span class="hljs-comment">#start_urls= [</span>
    <span class="hljs-comment">#   &quot;http://news.sina.com.cn/guide/&quot;</span>
    <span class="hljs-comment">#]#&#x8D77;&#x59CB;urls&#x5217;&#x8868;</span>

    <span class="hljs-function"><span class="hljs-keyword">def</span> <span class="hljs-title">__init__</span><span class="hljs-params">(self, *args, **kwargs)</span>:</span>
        domain = kwargs.pop(<span class="hljs-string">&apos;domain&apos;</span>, <span class="hljs-string">&apos;&apos;</span>)
        self.allowed_domains = filter(<span class="hljs-keyword">None</span>, domain.split(<span class="hljs-string">&apos;,&apos;</span>))
        super(SinaSpider, self).__init__(*args, **kwargs)


    <span class="hljs-function"><span class="hljs-keyword">def</span> <span class="hljs-title">parse</span><span class="hljs-params">(self, response)</span>:</span>
        items= []

        <span class="hljs-comment"># &#x6240;&#x6709;&#x5927;&#x7C7B;&#x7684;url &#x548C; &#x6807;&#x9898;</span>
        parentUrls = response.xpath(<span class="hljs-string">&apos;//div[@id=\&quot;tab01\&quot;]/div/h3/a/@href&apos;</span>).extract()
        parentTitle = response.xpath(<span class="hljs-string">&quot;//div[@id=\&quot;tab01\&quot;]/div/h3/a/text()&quot;</span>).extract()

        <span class="hljs-comment"># &#x6240;&#x6709;&#x5C0F;&#x7C7B;&#x7684;url &#x548C; &#x6807;&#x9898;</span>
        subUrls  = response.xpath(<span class="hljs-string">&apos;//div[@id=\&quot;tab01\&quot;]/div/ul/li/a/@href&apos;</span>).extract()
        subTitle = response.xpath(<span class="hljs-string">&apos;//div[@id=\&quot;tab01\&quot;]/div/ul/li/a/text()&apos;</span>).extract()

        <span class="hljs-comment">#&#x722C;&#x53D6;&#x6240;&#x6709;&#x5927;&#x7C7B;</span>
        <span class="hljs-keyword">for</span> i <span class="hljs-keyword">in</span> range(<span class="hljs-number">0</span>, len(parentTitle)):

            <span class="hljs-comment"># &#x6307;&#x5B9A;&#x5927;&#x7C7B;&#x7684;&#x8DEF;&#x5F84;&#x548C;&#x76EE;&#x5F55;&#x540D;</span>
            <span class="hljs-comment">#parentFilename = &quot;./Data/&quot; + parentTitle[i]</span>

            <span class="hljs-comment">#&#x5982;&#x679C;&#x76EE;&#x5F55;&#x4E0D;&#x5B58;&#x5728;&#xFF0C;&#x5219;&#x521B;&#x5EFA;&#x76EE;&#x5F55;</span>
            <span class="hljs-comment">#if(not os.path.exists(parentFilename)):</span>
            <span class="hljs-comment">#    os.makedirs(parentFilename)</span>

            <span class="hljs-comment"># &#x722C;&#x53D6;&#x6240;&#x6709;&#x5C0F;&#x7C7B;</span>
            <span class="hljs-keyword">for</span> j <span class="hljs-keyword">in</span> range(<span class="hljs-number">0</span>, len(subUrls)):
                item = SinaItem()

                <span class="hljs-comment"># &#x4FDD;&#x5B58;&#x5927;&#x7C7B;&#x7684;title&#x548C;urls</span>
                item[<span class="hljs-string">&apos;parentTitle&apos;</span>] = parentTitle[i]
                item[<span class="hljs-string">&apos;parentUrls&apos;</span>] = parentUrls[i]

                <span class="hljs-comment"># &#x68C0;&#x67E5;&#x5C0F;&#x7C7B;&#x7684;url&#x662F;&#x5426;&#x4EE5;&#x540C;&#x7C7B;&#x522B;&#x5927;&#x7C7B;url&#x5F00;&#x5934;&#xFF0C;&#x5982;&#x679C;&#x662F;&#x8FD4;&#x56DE;True (sports.sina.com.cn &#x548C; sports.sina.com.cn/nba)</span>
                if_belong = subUrls[j].startswith(item[<span class="hljs-string">&apos;parentUrls&apos;</span>])

                <span class="hljs-comment"># &#x5982;&#x679C;&#x5C5E;&#x4E8E;&#x672C;&#x5927;&#x7C7B;&#xFF0C;&#x5C06;&#x5B58;&#x50A8;&#x76EE;&#x5F55;&#x653E;&#x5728;&#x672C;&#x5927;&#x7C7B;&#x76EE;&#x5F55;&#x4E0B;</span>
                <span class="hljs-keyword">if</span>(if_belong):
                    <span class="hljs-comment">#subFilename =parentFilename + &apos;/&apos;+ subTitle[j]</span>

                    <span class="hljs-comment"># &#x5982;&#x679C;&#x76EE;&#x5F55;&#x4E0D;&#x5B58;&#x5728;&#xFF0C;&#x5219;&#x521B;&#x5EFA;&#x76EE;&#x5F55;</span>
                    <span class="hljs-comment">#if(not os.path.exists(subFilename)):</span>
                    <span class="hljs-comment">#    os.makedirs(subFilename)</span>

                    <span class="hljs-comment"># &#x5B58;&#x50A8; &#x5C0F;&#x7C7B;url&#x3001;title&#x548C;filename&#x5B57;&#x6BB5;&#x6570;&#x636E;</span>
                    item[<span class="hljs-string">&apos;subUrls&apos;</span>] = subUrls[j]
                    item[<span class="hljs-string">&apos;subTitle&apos;</span>] =subTitle[j]
                    <span class="hljs-comment">#item[&apos;subFilename&apos;] = subFilename</span>

                    items.append(item)

        <span class="hljs-comment">#&#x53D1;&#x9001;&#x6BCF;&#x4E2A;&#x5C0F;&#x7C7B;url&#x7684;Request&#x8BF7;&#x6C42;&#xFF0C;&#x5F97;&#x5230;Response&#x8FDE;&#x540C;&#x5305;&#x542B;meta&#x6570;&#x636E; &#x4E00;&#x540C;&#x4EA4;&#x7ED9;&#x56DE;&#x8C03;&#x51FD;&#x6570; second_parse &#x65B9;&#x6CD5;&#x5904;&#x7406;</span>
        <span class="hljs-keyword">for</span> item <span class="hljs-keyword">in</span> items:
            <span class="hljs-keyword">yield</span> scrapy.Request( url = item[<span class="hljs-string">&apos;subUrls&apos;</span>], meta={<span class="hljs-string">&apos;meta_1&apos;</span>: item}, callback=self.second_parse)

    <span class="hljs-comment">#&#x5BF9;&#x4E8E;&#x8FD4;&#x56DE;&#x7684;&#x5C0F;&#x7C7B;&#x7684;url&#xFF0C;&#x518D;&#x8FDB;&#x884C;&#x9012;&#x5F52;&#x8BF7;&#x6C42;</span>
    <span class="hljs-function"><span class="hljs-keyword">def</span> <span class="hljs-title">second_parse</span><span class="hljs-params">(self, response)</span>:</span>
        <span class="hljs-comment"># &#x63D0;&#x53D6;&#x6BCF;&#x6B21;Response&#x7684;meta&#x6570;&#x636E;</span>
        meta_1= response.meta[<span class="hljs-string">&apos;meta_1&apos;</span>]

        <span class="hljs-comment"># &#x53D6;&#x51FA;&#x5C0F;&#x7C7B;&#x91CC;&#x6240;&#x6709;&#x5B50;&#x94FE;&#x63A5;</span>
        sonUrls = response.xpath(<span class="hljs-string">&apos;//a/@href&apos;</span>).extract()

        items= []
        <span class="hljs-keyword">for</span> i <span class="hljs-keyword">in</span> range(<span class="hljs-number">0</span>, len(sonUrls)):
            <span class="hljs-comment"># &#x68C0;&#x67E5;&#x6BCF;&#x4E2A;&#x94FE;&#x63A5;&#x662F;&#x5426;&#x4EE5;&#x5927;&#x7C7B;url&#x5F00;&#x5934;&#x3001;&#x4EE5;.shtml&#x7ED3;&#x5C3E;&#xFF0C;&#x5982;&#x679C;&#x662F;&#x8FD4;&#x56DE;True</span>
            if_belong = sonUrls[i].endswith(<span class="hljs-string">&apos;.shtml&apos;</span>) <span class="hljs-keyword">and</span> sonUrls[i].startswith(meta_1[<span class="hljs-string">&apos;parentUrls&apos;</span>])

            <span class="hljs-comment"># &#x5982;&#x679C;&#x5C5E;&#x4E8E;&#x672C;&#x5927;&#x7C7B;&#xFF0C;&#x83B7;&#x53D6;&#x5B57;&#x6BB5;&#x503C;&#x653E;&#x5728;&#x540C;&#x4E00;&#x4E2A;item&#x4E0B;&#x4FBF;&#x4E8E;&#x4F20;&#x8F93;</span>
            <span class="hljs-keyword">if</span>(if_belong):
                item = SinaItem()
                item[<span class="hljs-string">&apos;parentTitle&apos;</span>] =meta_1[<span class="hljs-string">&apos;parentTitle&apos;</span>]
                item[<span class="hljs-string">&apos;parentUrls&apos;</span>] =meta_1[<span class="hljs-string">&apos;parentUrls&apos;</span>]
                item[<span class="hljs-string">&apos;subUrls&apos;</span>] =meta_1[<span class="hljs-string">&apos;subUrls&apos;</span>]
                item[<span class="hljs-string">&apos;subTitle&apos;</span>] =meta_1[<span class="hljs-string">&apos;subTitle&apos;</span>]
                <span class="hljs-comment">#item[&apos;subFilename&apos;] = meta_1[&apos;subFilename&apos;]</span>
                item[<span class="hljs-string">&apos;sonUrls&apos;</span>] = sonUrls[i]
                items.append(item)

        <span class="hljs-comment">#&#x53D1;&#x9001;&#x6BCF;&#x4E2A;&#x5C0F;&#x7C7B;&#x4E0B;&#x5B50;&#x94FE;&#x63A5;url&#x7684;Request&#x8BF7;&#x6C42;&#xFF0C;&#x5F97;&#x5230;Response&#x540E;&#x8FDE;&#x540C;&#x5305;&#x542B;meta&#x6570;&#x636E; &#x4E00;&#x540C;&#x4EA4;&#x7ED9;&#x56DE;&#x8C03;&#x51FD;&#x6570; detail_parse &#x65B9;&#x6CD5;&#x5904;&#x7406;</span>
        <span class="hljs-keyword">for</span> item <span class="hljs-keyword">in</span> items:
                <span class="hljs-keyword">yield</span> scrapy.Request(url=item[<span class="hljs-string">&apos;sonUrls&apos;</span>], meta={<span class="hljs-string">&apos;meta_2&apos;</span>:item}, callback = self.detail_parse)

    <span class="hljs-comment"># &#x6570;&#x636E;&#x89E3;&#x6790;&#x65B9;&#x6CD5;&#xFF0C;&#x83B7;&#x53D6;&#x6587;&#x7AE0;&#x6807;&#x9898;&#x548C;&#x5185;&#x5BB9;</span>
    <span class="hljs-function"><span class="hljs-keyword">def</span> <span class="hljs-title">detail_parse</span><span class="hljs-params">(self, response)</span>:</span>
        item = response.meta[<span class="hljs-string">&apos;meta_2&apos;</span>]
        content = <span class="hljs-string">&quot;&quot;</span>
        head = response.xpath(<span class="hljs-string">&apos;//h1[@id=\&quot;main_title\&quot;]/text()&apos;</span>).extract()
        content_list = response.xpath(<span class="hljs-string">&apos;//div[@id=\&quot;artibody\&quot;]/p/text()&apos;</span>).extract()

        <span class="hljs-comment"># &#x5C06;p&#x6807;&#x7B7E;&#x91CC;&#x7684;&#x6587;&#x672C;&#x5185;&#x5BB9;&#x5408;&#x5E76;&#x5230;&#x4E00;&#x8D77;</span>
        <span class="hljs-keyword">for</span> content_one <span class="hljs-keyword">in</span> content_list:
            content += content_one

        item[<span class="hljs-string">&apos;head&apos;</span>]= head[<span class="hljs-number">0</span>] <span class="hljs-keyword">if</span> len(head) &gt; <span class="hljs-number">0</span> <span class="hljs-keyword">else</span> <span class="hljs-string">&quot;NULL&quot;</span>

        item[<span class="hljs-string">&apos;content&apos;</span>]= content

        <span class="hljs-keyword">yield</span> item
</code></pre>
<h2 id="&#x6267;&#x884C;&#xFF1A;">&#x6267;&#x884C;&#xFF1A;</h2>
<pre><code class="lang-python">slave&#x7AEF;&#xFF1A;
scrapy runspider sina.py

Master&#x7AEF;&#xFF1A;
redis-cli&gt; lpush sinaspider:start_urls http://news.sina.com.cn/guide/
</code></pre>
<footer class="page-footer"><span class="copyright">Copyright &#xA9; BigCat all rights reserved&#xFF0C;powered by Gitbook</span><span class="footer-modification">&#x300C;Revision Time:
2016-12-17 00:58:49&#x300D;
</span></footer>
                    
                    </section>
                
                
                </div>
            </div>
        </div>

        
        <a href="../../file/part07/7.5.html" class="navigation navigation-prev " aria-label="Previous page: 尝试改写新浪网分类资讯爬虫1"><i class="fa fa-angle-left"></i></a>
        
        
        <a href="../../file/part07/7.7.html" class="navigation navigation-next " aria-label="Next page: IT桔子分布式项目1"><i class="fa fa-angle-right"></i></a>
        
    </div>
</div>

        
<script src="../../gitbook/app.js"></script>

    
    <script src="../../gitbook/plugins/gitbook-plugin-splitter/splitter.js"></script>
    

    
    <script src="../../gitbook/plugins/gitbook-plugin-toggle-chapters/toggle.js"></script>
    

    
    <script src="../../gitbook/plugins/gitbook-plugin-fontsettings/buttons.js"></script>
    

    
    <script src="../../gitbook/plugins/gitbook-plugin-livereload/plugin.js"></script>
    

<script>
// Bootstrap GitBook: load the core module via AMD and start it with the
// plugin configuration that was baked into this page at build time.
require(["gitbook"], function (gitbook) {
    gitbook.start({"disqus":{"shortName":"gitbookuse"},"github":{"url":"https://github.com/dododream"},"search-pro":{"cutWordLib":"nodejieba","defineWord":["gitbook-use"]},"sharing":{"weibo":true,"facebook":true,"twitter":true,"google":false,"instapaper":false,"vk":false,"all":["facebook","google","twitter","weibo","instapaper"]},"tbfed-pagefooter":{"copyright":"Copyright © BigCat","modify_label":"「Revision Time:","modify_format":"YYYY-MM-DD HH:mm:ss」"},"baidu":{"token":"ff100361cdce95dd4c8fb96b4009f7bc"},"sitemap":{"hostname":"http://www.treenewbee.top"},"donate":{"wechat":"http://weixin.png","alipay":"http://alipay.png","title":"","button":"赏","alipayText":"支付宝打赏","wechatText":"微信打赏"},"edit-link":{"base":"https://github.com/dododream/edit","label":"Edit This Page"},"splitter":{},"toggle-chapters":{},"highlight":{},"fontsettings":{"theme":"white","family":"sans","size":2},"livereload":{}});
});
</script>

        <!-- body:end -->
    </body>
    <!-- End of book Python爬虫课程讲义 -->
</html>
