#-*- coding:utf-8 -*-
import gzip
import os
import re
import urllib.request as ur

import lxml.etree as le
import user_agent

# Ask the operator for the search keyword and the inclusive page range to crawl.
kw = input('请输入关键词:')  # search keyword ("please enter keyword")
pn_start = int(input('起始页:'))  # first results page, 1-based ("start page")
pn_end = int(input('终止页:'))  # last results page, inclusive ("end page")

def getRequest(url):
    """Build a urllib Request for *url* carrying a randomized desktop User-Agent.

    The rotating User-Agent makes consecutive requests look like they come
    from different browsers.
    """
    headers = {'User-Agent': user_agent.get_user_agent_pc()}
    return ur.Request(url=url, headers=headers)

def getProOpener():
    """Return a urllib opener routed through a freshly fetched HTTP proxy.

    Each call asks the data5u rotating-proxy API for one proxy address and
    builds an opener around it, so successive requests can go out through
    different proxies.

    Returns:
        urllib.request.OpenerDirector configured with an HTTP ProxyHandler.

    Raises:
        urllib.error.URLError: if the proxy API is unreachable or times out.
    """
    # BUG FIX: the original urlopen() had no timeout, so a dead/slow proxy
    # API would hang the whole crawler indefinitely.
    api_url = ('http://api.ip.data5u.com/dynamic/get.html'
               '?order=334955f4d8cd98d6f61f638dbd5bd8ae&sep=4')
    proxy_address = ur.urlopen(api_url, timeout=10).read().decode('utf-8').strip()
    proxy_handler = ur.ProxyHandler({'http': proxy_address})
    return ur.build_opener(proxy_handler)

# Crawl each requested search-results page, collect blog links, and save each
# post's raw HTML under blog/<title>.html.
os.makedirs('blog', exist_ok=True)  # output dir; original crashed if missing

for pn in range(pn_start, pn_end + 1):
    url = 'https://so.csdn.net/so/search/s.do?p=%s&q=%s&t=blog&domain=&o=&s=&u=&l=&f=&rbg=0' % (pn, kw)
    request = getRequest(url)

    try:
        response = getProOpener().open(request).read()
        # Link anchors of results that also carry a "down fr" sibling span.
        href_s = le.HTML(response).xpath('//span[@class="down fr"]/../span[@class="link"]/a/@href')
        print(href_s)
    except Exception as e:
        # BUG FIX: the original bare `except: pass` silently swallowed every
        # page-level failure; log it and continue with the next page.
        print('page %s failed: %s' % (pn, e))
        continue

    for href in href_s:
        try:
            content = getProOpener().open(getRequest(href)).read()
            title_s = le.HTML(content).xpath('//h1[@class = "title-article"]/text()')
            print(title_s)
            # Fall back to the last URL segment when the page has no title.
            title = title_s[0].strip() if title_s else href.rsplit('/', 1)[-1]
            # Strip characters that are illegal in file names on common OSes.
            safe_title = re.sub(r'[\\/:*?"<>|]', '_', title)
            # BUG FIX: the original wrote to the literal path 'blog/%s.html'
            # (format never applied, every post overwrote the previous one)
            # and passed the already-consumed HTTPResponse object to write(),
            # which raises TypeError. Write the downloaded bytes instead.
            with open('blog/%s.html' % safe_title, 'wb') as f:
                f.write(content)
        except Exception as e:
            print(e)



