# -*- coding: utf-8 -*-
# Author: xiantingDeng
# File: 04_异常处理.py
# Time: 17:29


# 在网络爬虫运行时出现异常，若不处理则会因报错而终止运行，导致爬取数据中断，所以异常处理还是十分重要的。
'''
from urllib import request
from urllib import error

url = 'http://www.wer3214e13wer3.com/'

req = request.Request(url)
try:
    response = request.urlopen(req)
    html = response.read().decode("utf-8")
    print(len(html))
except error.URLError as e:
    print(e.reason)
'''

# 测试HTTPError的异常处理
# HTTPError内有三个属性：code 返回HTTP状态码，如404 ； reason 返回错误原因； headers 返回请求头
# Demonstrates HTTPError handling.
# HTTPError exposes three useful attributes:
#   code    -- the HTTP status code (e.g. 404)
#   reason  -- the error cause
#   headers -- the response headers
from urllib import request
from urllib import error

url = "https://img-ads.csdn.net/2018/20180420184005werqwefsd9410.png"
req = request.Request(url)
try:
    # `with` guarantees the response object is closed even if reading fails
    # (the original leaked the connection).
    with request.urlopen(req) as response:
        body = response.read()
    # The URL targets a PNG (binary data); decoding it as UTF-8 would raise
    # UnicodeDecodeError on success, so report the raw byte count instead.
    print(len(body))
except error.HTTPError as e:
    # HTTPError is a subclass of URLError, so it must be caught first.
    print(e.reason)   # error message
    print(e.code)     # HTTP status code
    print(e.headers)  # response headers
except error.URLError as e:
    # Network-level failure (DNS lookup, refused connection, ...):
    # there is no HTTP response, only a reason. The original script
    # crashed here because it caught only HTTPError.
    print(e.reason)