import asyncio
import os
import time

import aiohttp
import requests
from bs4 import BeautifulSoup

start = time.perf_counter()


def get_urls():
    """Scrape the directory listing and return the full URL of every linked file."""
    url = 'https://hashir672.serv00.net/'
    reqs = requests.get(url)
    soup = BeautifulSoup(reqs.text, 'html.parser')

    urls = []
    for link in soup.find_all('a'):
        file_link = link.get('href')
        if not file_link or file_link.endswith('/'):
            continue  # skip anchors with no href and links that point to directories
        urls.append(url + file_link)
    return urls


urls = get_urls()
os.makedirs('./extract_member', exist_ok=True)  # make sure the output directory exists


async def download_image(url):
    """Fetch one file and save it under ./extract_member/ using its original name."""
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as resp:
            with open('./extract_member/' + url.split('/')[-1], 'wb') as f:
                f.write(await resp.read())


async def main():
    # Start every download at once and wait until all of them have finished.
    await asyncio.gather(*[download_image(url) for url in urls])


asyncio.run(main())

print(f"Total time: {time.perf_counter() - start}")