# -*- coding: utf-8 -*-

import urllib.parse
import urllib.request
import requests
import sys
import re
import time
from bs4 import BeautifulSoup

sum_retries = 1000

def request_webpage_url(url, proxies, num_retries):
    """Fetch *url* with requests, retrying after failures.

    Args:
        url: page URL to download.
        proxies: requests-style proxies mapping, or None for a direct connection.
        num_retries: number of additional attempts allowed after the first failure.

    Returns:
        The requests.Response on success, or None once all retries are exhausted.
    """
    # Iterative retry loop: the original recursed once per failure, and with
    # sum_retries == 1000 that can hit CPython's default recursion limit.
    attempts_left = num_retries
    while True:
        try:
            # NOTE(review): verify=False disables TLS certificate checking —
            # kept for compatibility with the original call; confirm intended.
            # timeout added so a hung server cannot block forever.
            return requests.get(url, verify=False, proxies=proxies, timeout=30)
        except requests.exceptions.RequestException:
            # Narrowed from a bare `except:` which also swallowed
            # KeyboardInterrupt/SystemExit.
            if attempts_left <= 0:
                # All retries used up — original returned None in this case.
                return None
            # Retry counter message preserved byte-for-byte from the original.
            print("request请求失败，开始重试，第" + str(sum_retries - attempts_left + 1) + "次")
            time.sleep(5)
            attempts_left -= 1

if __name__ == '__main__':
    url = "https://db.yaozh.com/hmap/38994.html"
    html_body_response = request_webpage_url(url, None, sum_retries)

    # request_webpage_url returns None when every retry failed; the original
    # crashed here with AttributeError in that case.
    if html_body_response is None:
        print("Failed to download:", url)
        sys.exit(1)

    respon_webpage = html_body_response.content.decode("utf-8")
    soup = BeautifulSoup(respon_webpage, 'html.parser')

    # `text=` is deprecated in bs4 >= 4.4 in favour of `string=`.
    label = soup.find(string="省")
    if label is None:
        # Page layout changed or an error page was returned — fail clearly
        # instead of crashing on `None.parent` below.
        print("Marker '省' not found in page:", url)
        sys.exit(1)

    # Parent tag of the "省" text node; iterating it yields its children,
    # exactly as the original did.
    tr = label.parent
    for td in tr:
        print(td.text)

