#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2020/2/4 11:36
# @Author : diyhome
# @Site : https://gitee.com/diyhome/
# @File : run.py
# @Software: PyCharm

from TAnswer import TAnswer
from bs4 import BeautifulSoup
from proxy import ProxyIP
import requests
import os
import sys


def DownImg(url, proxy, middle_path="img"):
    """Download the image at *url* through an HTTP proxy.

    The file is saved to ``<cwd>/<middle_path>/<basename of url>``,
    creating the directory if needed.  Network errors are retried up to
    5 times with the same proxy.

    :param url: direct URL of the image to fetch.
    :param proxy: proxy address as ``host:port`` (scheme is added here).
    :param middle_path: sub-directory (relative to cwd) to save into.
    :return: True on success, False when every attempt failed — the
             caller treats False as "this proxy is dead" and rotates it.
    """
    _re_try_count = 5
    proxy_ip = proxy
    while _re_try_count > 0:
        # noinspection PyBroadException
        try:
            # A timeout is essential here: without it a dead proxy makes
            # requests.get() block forever, so the retry counter below
            # would never even get a chance to run.
            response = requests.get(
                url,
                proxies={"http": "http://{}".format(proxy_ip)},
                headers={'User-Agent': 'Mozilla/5.0 (Linux; Android 4.1.1; Nexus 7 Build/JRO03D) AppleWebKit/535.19 (KHTML, like Gecko) Chrome/18.0.1025.166  Safari/535.19'},
                timeout=10,
            )
        except Exception:
            # Broad catch is deliberate: any network/proxy failure just
            # consumes one retry.
            _re_try_count -= 1
            continue
        file_name = url.split("/")[-1]
        dir_path = os.path.join(os.getcwd(), middle_path)
        aid_path = os.path.join(dir_path, file_name)
        # exist_ok avoids the check-then-create race of the old
        # os.path.exists() guard.
        os.makedirs(dir_path, exist_ok=True)
        with open(aid_path, "wb") as f:
            f.write(response.content)
        return True
    return False

# Title of the most recently scraped page; set as a side effect of
# get_img_url() and read by the __main__ loop to name the download folder.
page_title = ""
def get_img_url(url):
    """Collect the lazy-load image URLs from every answer on *url*.

    Each answer's HTML fragment is parsed and every ``<img>`` tag's
    ``data-original`` attribute (the real image source used by the
    site's lazy loader) is gathered.

    Side effect: updates the module-level ``page_title`` from the
    fetched page.

    :param url: page URL handed to TAnswer.
    :return: list of image URL strings (tags without ``data-original``
             are skipped).
    """
    global page_title
    answers = TAnswer(url)
    fragments = answers.get_json()
    page_title = answers.page_title
    found = []
    for fragment in fragments:
        parsed = BeautifulSoup(fragment[1], 'lxml')
        found.extend(
            tag.get('data-original')
            for tag in parsed.find_all('img')
            if tag.get('data-original')
        )
    return found

if __name__ == '__main__':
    # Usage: run.py <page-url> [<page-url> ...]
    pro_ip = ProxyIP()
    p_ip = pro_ip.get_proxy_ip()
    for link in sys.argv[1:]:
        img_link_list = get_img_url(link)
        for img_url in img_link_list:
            print("Downloading %s" % img_url)
            # On failure, rotate to a fresh proxy and retry the SAME
            # image.  (The previous version rotated the proxy but then
            # `continue`d to the next image, silently dropping the one
            # that failed.)  Bound the retries so a truly unreachable
            # image cannot loop forever.
            attempts_left = 3
            while not DownImg(img_url, p_ip, "img/%s" % page_title):
                pro_ip.delete_proxy_ip(p_ip)
                p_ip = pro_ip.get_proxy_ip()
                attempts_left -= 1
                if attempts_left <= 0:
                    print("Giving up on %s" % img_url)
                    break