import re
import  time
import requests
from util.logger import get_looger
import os
import shutil

# Project-local logger factory from util.logger; writes to the named log
# file and tags records with this script's path.
# NOTE(review): "get_looger" looks like a typo for "get_logger", but it is
# the actual exported name imported above — renaming requires changing
# util.logger too, so it is left as-is here.
logger = get_looger("彼岸网图片.log", __file__)

logger.info("爬虫启动")

try:
    # Recreate the output directory from scratch on every run so stale
    # images from previous crawls are removed.
    root = 'static/彼岸网图片/'
    if os.path.exists(root):
        shutil.rmtree(root)
    os.makedirs(root)

    url = "https://pic.netbian.com/index.html"

    headers = {
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/141.0.0.0 Safari/537.36",
    }
    # Detail pages also expect a referer; extend the base headers once
    # instead of duplicating the full header dict per request.
    detail_headers = {**headers, "referer": "https://pic.netbian.com/index.html"}

    # timeout prevents a stalled connection from hanging the crawler.
    res = requests.get(url, headers=headers, timeout=10)

    # The site serves GBK-encoded HTML; ignore undecodable bytes so one
    # stray character cannot abort the whole crawl.
    img_res = res.content.decode('gbk', errors='ignore')

    # Each match: (detail-page path, image title, thumbnail src).
    items = re.findall(r'<a href="(/tupian/\d{5}.html)" title="(.*?)".*?src="(.*?)".*?</a>', img_res, re.S)
    for item in items:
        time.sleep(1)  # throttle: be polite to the server
        detail_url = "https://pic.netbian.com" + item[0]
        title = item[1]

        img_url = "https://pic.netbian.com" + item[2]
        logger.info(f"正在爬取：{title}缩略图")

        thumb_res = requests.get(img_url, headers=headers, timeout=10)
        with open(f"{root}{title}_thumb.jpg", 'wb') as f:
            f.write(thumb_res.content)
        logger.info(f"成功保存：{title}缩略图")

        detail_res = requests.get(detail_url, headers=detail_headers, timeout=10)
        detail_img = detail_res.content.decode('gbk', errors='ignore')
        dts = re.search(r'<div class="photo-pic">.*?src="(.*?)".*?</div>', detail_img, re.S)
        # re.search returns None when the page layout does not match; guard
        # the match object itself — the old `if end_url:` check was always
        # true because the concatenated URL string is never empty, and
        # dts.group(1) would already have raised on a None match.
        if dts:
            end_url = "https://pic.netbian.com" + dts.group(1)
            logger.info(f"正在爬取：{title}详情图")
            detail_img_res = requests.get(end_url, headers=headers, timeout=10)
            # Save with a .jpg extension, consistent with the thumbnail
            # naming (the original wrote an extensionless file).
            with open(f"{root}{title}.jpg", 'wb') as f:
                f.write(detail_img_res.content)
            logger.info(f"成功保存：{title}详情图")

        # NOTE(review): only the first listing item is crawled — looks like
        # a debugging leftover; remove this break to fetch the whole page.
        break


except Exception as e:
    # Top-level boundary: log the failure so the shutdown message below
    # still runs instead of crashing the script.
    logger.error(e)
    logger.error("出错了")

logger.info("爬虫结束")

