
import random
import requests
from urllib.request import urlretrieve
from bs4 import *
from lxml import etree  # etree.HTML provides XPath querying

def getheaders():
    """Return a requests-compatible headers dict with a randomly chosen User-Agent.

    Rotating the UA per request makes the scraper look less like a single bot.

    Returns:
        dict: ``{'User-Agent': <one of the browser UA strings below>}``
    """
    # FIX: the original list was missing a comma after the first entry, so the
    # first two user-agent strings were silently concatenated into one invalid
    # UA.  Also dropped the redundant line-continuation backslashes — they are
    # unnecessary inside brackets.
    user_agent_list = [
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/22.0.1207.1 Safari/537.1",
        "Mozilla/5.0 (X11; CrOS i686 2268.111.0) AppleWebKit/536.11 (KHTML, like Gecko) Chrome/20.0.1132.57 Safari/536.11",
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1092.0 Safari/536.6",
        "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1090.0 Safari/536.6",
        "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/19.77.34.5 Safari/537.1",
        "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/536.5 (KHTML, like Gecko) Chrome/19.0.1084.9 Safari/536.5",
        "Mozilla/5.0 (Windows NT 6.0) AppleWebKit/536.5 (KHTML, like Gecko) Chrome/19.0.1084.36 Safari/536.5",
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
        "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
        "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_0) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
        "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1062.0 Safari/536.3",
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1062.0 Safari/536.3",
        "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
        "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
        "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.0 Safari/536.3",
        "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.24 (KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24",
        "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/535.24 (KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24",
    ]
    return {'User-Agent': random.choice(user_agent_list)}

# Pool of HTTP proxy endpoints; one is chosen at random per request so the
# scraper's traffic is spread across several exit IPs.
ip_s = [
    'http://118.212.137.135:31288',
    'http://116.213.98.6:8080',
    'http://114.215.95.188:3128'
    ]

def get_procie():
    """Return a requests-style ``proxies`` mapping with a random proxy from ``ip_s``."""
    chosen = random.choice(ip_s)
    return {'http': chosen}


# Search pansoso.com for the query page and collect the absolute URL of every
# result link (<a target="_blank" href=...> inside the div.pss containers).
url_first = 'http://www.pansoso.com'
url = 'http://www.pansoso.com/zh/python基础教程pdf'
html = requests.get(url, headers=getheaders(), proxies=get_procie())

# Parse the search page, narrow to the result containers, then re-parse that
# fragment to pull out the anchors that open in a new tab.
search_soup = BeautifulSoup(html.text, 'lxml')
result_divs = search_soup.find_all('div', class_='pss')
fragment_soup = BeautifulSoup(str(result_divs), 'lxml')
anchors = fragment_soup.find_all('a', attrs={'target': '_blank'})

url_list = []
for anchor in anchors:
    if not anchor.has_attr('href'):
        continue
    absolute = url_first + anchor.get('href')
    print(absolute)
    url_list.append(absolute)


# Visit only the first search result, extract its "download" links via XPath,
# and fetch each one (plus a hard-coded redirect URL), printing the responses.
for result_url in url_list[:1]:
    detail = requests.get(result_url, headers=getheaders(), proxies=get_procie())
    selector = etree.HTML(detail.text, parser=None, base_url=None)
    # hrefs of the red "download" anchors on the detail page
    content = selector.xpath('//div[@id="con"]/div[@class="content"]/div[@class="down"]/span/a[@class="red"]/@href')
    for con in content:
        con_text = requests.get(con, headers=getheaders(), proxies=get_procie())
        print(con_text.text)
        # FIX: removed dead code — the original parsed con_text.text into an
        # lxml tree (`xp = etree.HTML(...)`) that was never used.
        # NOTE(review): this URL is hard-coded for one specific file and does
        # not depend on the scraped link above — presumably a captured
        # redirect; verify it still matches the site's current scheme.
        baidu = 'http://www.pansoso.com/?a=go&url=68ad0de407e70a7919b4d95e8896975ewY4e3c2ead7eac2be070affd6cd8056925MvMWRGZEhTcmY&t=cHl0aG9u5Z!656GA5pWZ56iLKOesrDLniYgg5L!u6K6i54mIKS5wZGY=&dx=MzRNQg==&m='
        baidu_text = requests.get(baidu, headers=getheaders(), proxies=get_procie())
        print(baidu_text.text)

print('下载完成')

