'''
Author: momochong0
Date: 2021-05-25 00:40:48
LastEditors: momochong0
LastEditTime: 2021-05-25 00:47:41
Description: 要推网络荣誉出品
'''
#coding=utf-8

import io
import re

try:
    # Python 2
    import urllib2
except ImportError:
    # Python 3: urllib2's functionality moved into urllib.request
    import urllib.request as urllib2


class Spider:
    """Crawler for the joke listing pages on www.neihanpa.com.

    Downloads one listing page at a time, extracts the joke fragments
    with a regular expression, echoes them to the console, and appends
    the raw fragments to a text file.  Runs on both Python 2 and 3.
    """

    def __init__(self, output_path=r'E:\work\python\duanzi.txt', start_page=2):
        """
        output_path -- file the jokes are appended to; the default keeps
                       the original hard-coded Windows path.
        start_page  -- number of the first listing page to crawl.
        """
        self.enable = True        # set to False to stop the do_work() loop
        self.page = start_page    # next page number to crawl
        self.output_path = output_path

    def load_page(self, page):
        """Download listing page *page* and return its joke fragments.

        Returns a list of raw HTML strings, one per joke.
        Propagates urllib's URLError/HTTPError on network failure.
        """
        url = "http://www.neihanpa.com/article/index_" + str(page) + ".html"
        # Spoof a browser user agent; the site rejects urllib's default one.
        user_agent = "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0;"
        headers = {"User-Agent": user_agent}

        req = urllib2.Request(url, headers=headers)
        response = urllib2.urlopen(req)
        try:
            raw = response.read()
        finally:
            # The original never closed the connection; always release it.
            response.close()

        # Python 3 returns bytes (Python 2 a byte str); decode once here so
        # the regex and all later printing operate on text.
        if isinstance(raw, bytes):
            raw = raw.decode("utf-8", "replace")
        return self.parse_page(raw)

    def parse_page(self, html):
        """Extract all joke fragments from one listing page's HTML.

        Every joke sits inside
        <div class="text-column-item box box-790"> ... </div>;
        the captured group is the div's inner HTML.
        """
        pattern = re.compile(
            r'<div.*?class="text-column-item box box-790">(.*?)</div>', re.S)
        return pattern.findall(html)

    def deal_one_page(self, item_list, page):
        """Echo every joke of one page and persist the raw fragments."""
        # NOTE: the old .decode("utf-8").encode("gb2312") round-trip crashed
        # on characters outside GB2312 and on non-Chinese-Windows consoles;
        # printing the text directly is portable.
        print("正在存储第%d页的段子" % (page))

        for item in item_list:
            print("==========")
            # Strip the simple markup for display only; the unmodified
            # fragment is what gets written to disk.
            print(item.replace("<p>", "").replace("</p>", "").replace("<br />", ""))
            self.write_to_file(item)
        print("第%d页的段子存储完毕" % (page))

    def write_to_file(self, txt):
        """Append one joke fragment plus a separator line to the output file.

        txt -- the fragment as text (unicode on Python 2).
        """
        # io.open gives an encoding-aware handle on both Python versions, and
        # the context manager closes it even if a write raises (the original
        # leaked the handle in that case).
        with io.open(self.output_path, 'a', encoding='utf-8') as f:
            f.write(txt)
            f.write(u'-------------------------------------')

    def do_work(self):
        """Interactive loop: every <Enter> crawls one more page; typing
        ``quit`` stops the crawler."""
        try:
            read_command = raw_input      # Python 2
        except NameError:                 # Python 3 renamed it to input()
            read_command = input

        while self.enable:
            print("按回车继续")
            print("输入quit退出")
            command = read_command()
            if command == "quit":
                self.enable = False
                break
            item_list = self.load_page(self.page)
            self.deal_one_page(item_list, self.page)
            self.page += 1


# ---------------------------------------------------------------------------
# Script entry point
# ---------------------------------------------------------------------------

def main():
    """Build a Spider and hand control to its interactive crawl loop."""
    crawler = Spider()
    crawler.do_work()


if __name__ == '__main__':
    main()
