from bs4 import BeautifulSoup
import urllib.request


def loadPage(url, use_proxy=True):
    """Fetch a stock-fund listing page and print the first six cells of each table row.

    Args:
        url: Page URL to fetch (expected to contain a table with id="datalist").
        use_proxy: When True (default), route the request through the
            hard-coded HTTP proxy; when False, connect directly.

    Side effects:
        Installs a global opener via urllib.request.install_opener, so later
        urlopen() calls in this process also use the chosen proxy setting.
        Prints the extracted cell text to stdout.
    """
    # Handler configured with the proxy type and proxy server IP:PORT.
    httpproxy_handler = urllib.request.ProxyHandler(
        {"http": "223.199.29.192:9999"})
    # Handler with no proxy (direct connection).
    nullproxy_handler = urllib.request.ProxyHandler({})
    if use_proxy:
        opener = urllib.request.build_opener(httpproxy_handler)
    else:
        opener = urllib.request.build_opener(nullproxy_handler)
    # Install a global opener so subsequent urlopen() calls carry the
    # handler's proxy behavior (kept for backward compatibility).
    urllib.request.install_opener(opener)

    headers = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.99 Safari/537.36"}
    request = urllib.request.Request(url, headers=headers)
    # Close the response promptly instead of leaking the connection.
    with urllib.request.urlopen(request) as response:
        html = response.read()
    # Parse the raw bytes with bs4; the site serves GBK-encoded pages.
    soup = BeautifulSoup(html, 'html5lib', from_encoding='gbk')
    # find() returns None when the element is missing, avoiding the
    # IndexError that find_all(...)[0] would raise on an empty result.
    datalist = soup.find(id="datalist")
    if datalist is None:
        return

    for line in datalist.select("tr"):
        for mydata in line.select("td:nth-child(-n+6)"):  # select the first 6 cells
            print(mydata.get_text())
 

    
if __name__ == "__main__":
    url='https://quote.stockstar.com/fund/stock_3_1_2.html'
    loadPage(url)
