# -*- coding: utf-8 -*-
"""
@Time    : 2024/7/11 17:25 
@Author  : ZhangShenao 
@File    : serial_crawler.py
@Desc    : 简单的串行爬虫
"""
import time

from bacis.decorator.perf_log_decorator import perf_log


def crawl_page(url: str) -> None:
    """
    Crawl a single page (simulated).

    The text after the final '-' in the url is parsed as a float and used
    as the number of seconds the fake fetch takes.

    :param url: page url, e.g. 'url-2' (sleeps 2 seconds)
    """
    print(f'crawling {url} ...')
    # Simulate network latency: the delay is encoded in the url suffix.
    delay_seconds = float(url.rsplit('-', 1)[-1])
    time.sleep(delay_seconds)
    print(f'finish crawling {url}')


@perf_log
def crawl_pages(urls: list) -> None:
    """
    Crawl every page in the given list, one after another (serially).

    Total wall-clock time is the sum of each page's individual delay;
    the @perf_log decorator reports how long the whole batch took.

    :param urls: list of page urls
    """
    for page_url in urls:
        crawl_page(page_url)


if __name__ == '__main__':
    # Build 'url-1' .. 'url-4': delays of 1-4s, so the serial run takes ~10s.
    page_urls = ['url-' + str(n) for n in range(1, 5)]
    crawl_pages(page_urls)
