import urllib.request as ur
import lxml.etree as le
# Step 1: fetch a proxy address via urlopen
# Step 2: wrap the proxy IP with ProxyHandler
# Step 3: build a new opener with build_opener
# Step 4: open the wrapped request with the opener
# Fetch a proxy address from a proxy-provider API, wrap it in a
# ProxyHandler, build an opener around it, and open a request through
# that proxy.
# NOTE(review): the provider URL below is an empty placeholder — fill it
# in before running, otherwise urlopen("") raises ValueError.
proxy_add = ur.urlopen("").read().decode().strip()
proxy_handler = ur.ProxyHandler(
    # Mapping of URL scheme -> proxy address.
    {
        "http": proxy_add
    }
)
opener = ur.build_opener(proxy_handler)
# BUG FIX: urllib requires a full URL including the scheme; a bare host
# like 'edu.csdn.net' raises ValueError: unknown url type.
request = ur.Request(url='http://edu.csdn.net')
# Raw response bytes fetched through the proxy.
res = opener.open(request).read()

# Simulated login
# Build a Request with headers containing User-Agent and Cookie
# Fetch it with urlopen(request)
# Simulated login: build a Request whose headers carry the User-Agent
# and Cookie of an already-logged-in browser session, fetch the page,
# then parse it with lxml and select the title via XPath.
# NOTE(review): the url, header values, and xpath expression below are
# empty placeholders — fill them in before running (an empty xpath
# expression raises XPathEvalError).
request = ur.Request(
    url='',
    headers={
        # BUG FIX: HTTP header names use hyphens. 'User_Agent' (with an
        # underscore) is sent literally and is not recognized by servers
        # as the User-Agent header.
        'User-Agent': "",
        'Cookie': ""
    }
)
# Fetch and decode the response body as UTF-8 text.
res = ur.urlopen(request).read().decode('utf-8')
# Parse the text into an lxml element tree and select the title nodes.
html_x = le.HTML(res)
title = html_x.xpath('')
