import requests,time#主要使用requests发起请求，进一步抓取连接
from lxml import etree#引入lxml库中的etree来处理返回报文


def Redirect(url):
    """Resolve *url* to its final location after any HTTP redirects.

    Performs a GET with a 10-second timeout; `requests` follows redirects
    automatically, so ``res.url`` is the final destination. On a request
    failure the ORIGINAL url is returned unchanged after a short back-off
    sleep, so callers always get a usable string back.
    """
    try:
        res = requests.get(url, timeout=10)
        url = res.url  # final URL after redirect chain
    except requests.RequestException as e:  # narrowed from bare Exception: only network/HTTP errors are expected here
        print('4', e)
        time.sleep(1)  # brief back-off before returning to the caller's loop
    return url

def baidu_search(wd, pn_max, sav_file_name, max_links=30):
    """Search Baidu for *wd*, append result links to a file, return them as a set.

    Parameters:
        wd            -- query keyword.
        pn_max        -- number of result pages to scan (10 results per page).
        sav_file_name -- path of the file links are appended to.
        max_links     -- stop after this many links have been written
                         (default 30, matching the original hard-coded cap).

    Returns:
        The set of resolved links that were written to the file.

    BUG FIX: in the original, ``return_set.add(...)`` sat AFTER an
    unconditional ``break`` and was therefore unreachable — the function
    always returned an empty set. The add now happens before the break.
    """
    url = 'http://www.baidu.com/s'  # search engine endpoint; swap to use another engine
    return_set = set()
    a = 0  # number of links written so far; capped by max_links

    for page in range(pn_max):
        pn = page * 10  # Baidu paginates with a 10-per-page offset
        querystring = {'wd': wd, 'pn': pn}
        # Plain browser-like HTTP request headers.
        headers = {
            'pragma':'no-cache',
            'accept-encoding': 'gzip,deflate,br',
            'accept-language' : 'zh-CN,zh;q=0.8',
            'upgrade-insecure-requests' : '1',
            'user-agent': "Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:62.0) Gecko/20100101 Firefox/62.0",
            'accept': "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
            'cache-control': "no-cache",
            'connection': "keep-alive",
        }

        try:
            # Fetch one page of results and parse it with lxml.
            response = requests.request('GET', url, headers=headers, params=querystring)
            selector = etree.HTML(response.text, parser=etree.HTMLParser(encoding='utf-8'))
        except Exception as e:
            print('页面加载失败', e)
            continue  # best-effort: skip this page, keep crawling

        # Append links as we find them; explicit encoding avoids
        # platform-dependent default-codec surprises.
        with open(sav_file_name, 'a+', encoding='utf-8') as f:
            for i in range(1, 10):
                try:
                    if a == max_links:
                        return return_set
                    # Result entries carry sequential numeric ids on Baidu pages.
                    context = selector.xpath('//*[@id="' + str(pn + i) + '"]/h3/a[1]/@href')
                    # Resolve the redirect-wrapped Baidu link to its real target.
                    # (Bound to a new name: the original clobbered the loop index `i`.)
                    link = Redirect(context[0])
                    print('i=' + link)
                    f.write(link)
                    f.write('\n')
                    print(a, "++++++++++++++++++++")
                    a = a + 1
                    return_set.add(link)  # was dead code after `break` in the original
                    break  # keep only the first hit of each page, as before
                except Exception as e:
                    # XPath miss / empty context etc.: log and try the next entry.
                    print(i, return_set)
                    print('3', e)

    return return_set

if __name__ == '__main__':
    # Entry point: crawl up to 100 result pages for the keyword and append
    # the harvested links to save_url_soup.txt in the script's directory.
    keyword = '车辆'
    max_pages = 100
    output_path = 'save_url_soup.txt'
    collected_links = baidu_search(keyword, max_pages, output_path)
