import os

import requests


def get_image(url, timeout=10):
    """Download *url* and return the response body as raw bytes.

    Parameters:
        url: address of the image to fetch.
        timeout: seconds to wait for the server before giving up
            (prevents the script from hanging forever on a dead host).

    Returns:
        The response content as bytes, or None if the request failed
        (connection error, timeout, or non-2xx status).
    """
    head = {'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/133.0.0.0 Safari/537.36'}
    try:
        r = requests.get(url=url, headers=head, timeout=timeout)
        # Raise for 4xx/5xx so HTTP errors are handled like network errors.
        r.raise_for_status()
    except requests.exceptions.RequestException as err:
        # Catch only request-related failures (not programming errors),
        # report them, and signal failure to the caller explicitly.
        print(err)
        return None
    return r.content


def save_imag(path, content):
    """Write binary *content* to *path*.

    Parameters:
        path: destination file path; overwritten if it exists.
        content: raw bytes to write, or None to indicate a failed
            download (in which case nothing is written).

    Returns:
        None.
    """
    # get_image returns None on failure; skip the write instead of
    # crashing with a TypeError on f.write(None).
    if content is None:
        return
    with open(path, 'wb') as f:
        f.write(content)


if __name__ == '__main__':
    url_list = [
        'http://www.bspider.top/static/yh31/images/202103302113573793.gif',
        'http://www.bspider.top/static/yh31/images/202103302125467574.gif',
        'http://www.bspider.top/static/yh31/images/202008242134566465.gif',
        'http://www.bspider.top/static/yh31/images/202103302056530751.gif',
    ]
    # Loop-invariant target directory: define once (was re-bound every
    # iteration and shadowed the `dir` builtin) and create it up front
    # so open() in save_imag does not fail on a missing folder.
    save_dir = 'd:\\表情包\\'
    os.makedirs(save_dir, exist_ok=True)
    for url in url_list:
        imag_content = get_image(url)
        if imag_content is None:
            # Download failed; skip this URL instead of writing None.
            continue
        # Last path segment of the URL is the file name.
        file_name = url.split('/')[-1]
        save_imag(os.path.join(save_dir, file_name), imag_content)
