import os

import bs4
import requests


def open_url(url, timeout=10):
    """Fetch *url* and return the ``requests.Response``.

    A mobile Safari User-Agent is sent because the target site serves the
    desired markup only to mobile clients (presumably — the original code
    always sent it).

    Args:
        url: Absolute URL to fetch (page or image).
        timeout: Seconds before the request aborts. Without a timeout,
            ``requests.get`` can block indefinitely on a stalled server.

    Returns:
        The ``requests.Response`` object (not checked for HTTP errors;
        callers inspect ``.text`` / ``.content`` directly).
    """
    headers = {
        "User-Agent": "Mozilla/5.0 (iPhone; CPU iPhone OS 13_2_3 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/13.0.3 Mobile/15E148 Safari/604.1",
    }
    return requests.get(url, headers=headers, timeout=timeout)


def saveImg(Url, saveName):
    """Download the single image embedded in the page at *Url*.

    The page is parsed with BeautifulSoup and the image is taken from the
    first ``<div class="nr343245">`` element (site-specific markup). The
    binary content is written to ``saveName + ".jpg"``.

    Args:
        Url: URL of the HTML page containing the image.
        saveName: Destination path without the ``.jpg`` extension.

    Returns:
        True on success.

    Raises:
        ValueError: If the expected ``div``/``img`` is not present
            (e.g. a 404 page or changed site layout).
    """
    res = open_url(Url)
    soup = bs4.BeautifulSoup(res.text, "html.parser")
    # soup.find returns None on a miss; the original find_all(...)[0]
    # raised a bare IndexError instead.
    img_div = soup.find("div", class_="nr343245")
    if img_div is None or img_div.img is None:
        raise ValueError(f"no image found on page: {Url}")
    img_url = img_div.img["src"]
    with open(saveName + ".jpg", "wb") as file:
        file.write(open_url(img_url).content)
    return True

def main():
    """Interactively download a numbered series of images.

    Prompts for a sample page URL and a destination directory, then walks
    pages ``<base>_1.html`` .. ``<base>_500.html`` (the base is everything
    before the first ``_`` in the entered URL), saving each image as
    ``<dir>/<n>.jpg``. Stops early after three *consecutive* failures,
    which is taken to mean the series has ended.
    """
    MAX_PAGES = 500  # upper bound on the series; loop usually breaks earlier

    url = input("请输入图片地址：")
    img_dir = input("请输入保存路径：")
    base = url.split("_")[0]

    consecutive_errors = 0
    for n in range(1, MAX_PAGES + 1):
        try:
            # os.path.join instead of the original hard-coded "\\" so the
            # script also works on non-Windows systems.
            saveImg(f"{base}_{n}.html", os.path.join(img_dir, str(n)))
            consecutive_errors = 0  # reset: only *consecutive* failures count
            print(f"第{n}张图片保存成功")
        except Exception:
            print(f"第{n}张图片保存失败")
            consecutive_errors += 1
            if consecutive_errors >= 3:
                print("连续错误三次，程序结束")
                break


if __name__ == "__main__":
    main()
