import requests
from bs4 import BeautifulSoup
import time

# Target site root (kept for reference; page URLs are built in the loop below).
base_url = 'https://www.936aa.com'

# Browser-like request headers so the site serves normal desktop HTML.
# Fix: the key was previously written 'Accept - Encoding' (with spaces),
# which is not a valid HTTP header name, so no Accept-Encoding header was
# ever actually sent. Corrected to 'Accept-Encoding'.
headers = {
    'Accept': 'text/html, application/xhtml+xml, image/jxr, */*',
    'Accept-Encoding': 'gzip, deflate',
    'Accept-Language': 'zh-Hans-CN, zh-Hans; q=0.5',
    'Connection': 'Keep-Alive',
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36 Edge/15.15063',
}
# Image URLs collected from every gallery page.
imgurl_list = []
for i in range(1, 1026):
    try:
        # Each gallery page lives at /htm/gif0/<n>.htm.
        url = 'https://www.936aa.com/htm/gif0/' + str(i) + '.htm'

        # Fix: a timeout prevents one stalled connection from hanging the
        # whole 1000+ page run indefinitely.
        r = requests.get(url, headers=headers, timeout=10)
        soup = BeautifulSoup(r.content, "lxml")

        # Images sit inside <div class="picContent">; a page without that
        # div (removed/error page) is skipped explicitly instead of raising
        # AttributeError into the broad except below.
        pa = soup.find('div', attrs={'class': 'picContent'})
        if pa is None:
            continue
        for img in pa.find_all('img'):
            imgurl_list.append(img['src'])

    except Exception as e:
        # Best-effort scrape: report the failure and move on to the next page.
        print(e)
    finally:
        # Small politeness delay; one-line countdown of pages remaining.
        time.sleep(0.01)
        print(str(1026 - i), end='\r')

# Persist the collected URLs, one per line, appending to any previous run.
print('start write to disk...')  # fixed typo: 'strat' -> 'start'
with open('gifurl.txt', mode='a+', encoding='utf-8') as file:
    # writelines batches the output instead of one write() call per URL.
    file.writelines(imgurl + '\n' for imgurl in imgurl_list)

# count=0
# for imgurl in imgurl_list[:10]:
#     print(imgurl)
#     r = requests.get(imgurl,headers=headers)
#     count=count+1
#     with open(str(count)+'.gif','wb') as file:
#         file.write(r.content)
