
import urllib
import urllib.request
import re
from bs4 import BeautifulSoup

class CatchPics():
    """Crawl huaban.com board pages and download each pin's main image.

    Typical usage: ``catch(url)`` fetches one page, harvests pin/board
    links, and downloads the pin images; ``get_imgs_boards()`` then
    repeats ``catch`` for every discovered board.
    """

    baseurl = "http://huaban.com"
    # Kept for backward compatibility with any external reader of the
    # class attribute; instances get their own list in __init__ so crawl
    # state is no longer shared between instances (the original mutable
    # class attribute was shared by every instance).
    boards_urls = []

    def __init__(self):
        # Per-instance crawl state: relative "/boards/<id>/" and
        # "/pins/<id>/" paths discovered while scanning pages.
        self.boards_urls = []
        self.pins_urls = []

    def get_content(self, url):
        """Fetch *url* and return the response body decoded as UTF-8."""
        request = urllib.request.Request(url)
        # Context manager guarantees the socket is closed (the original
        # leaked the response object).
        with urllib.request.urlopen(request) as response:
            return response.read().decode('utf-8')

    def get_urls(self, content):
        """Scan *content* (an HTML string) for pin and board links.

        Rebuilds self.pins_urls with every "/pins/<id>/" path found.
        self.boards_urls is populated only while it is empty, so that
        recursing into each board (catch -> get_urls) cannot grow the
        board list forever.  Returns the list of pin paths.
        """
        soup = BeautifulSoup(content, 'html.parser')
        hrefs = [a['href'] for a in soup.find_all("a") if a.has_attr('href')]

        self.pins_urls = []
        # Save boards only once — forbids infinite recursion when catch()
        # is re-invoked per board by get_imgs_boards().
        boards_save = not self.boards_urls
        pattern = re.compile('/[a-zA-Z]*/[0-9]*/', re.S)
        for href in hrefs:
            for real_href in pattern.findall(href):
                # find(...) > 0 == "contains the word but not at index 0",
                # i.e. a "/pins/..." path rather than a bare "pins/..."
                # (exactly the original's `!= -1 and != 0` pair).
                if real_href.find("pins") > 0:
                    self.pins_urls.append(real_href)
                if boards_save and real_href.find("boards") > 0:
                    self.boards_urls.append(real_href)

        print("len pins: ", len(self.pins_urls), ", len boards: ", len(self.boards_urls))
        # BUG FIX: the original returned `t_a_real_hrefs`, the last
        # findall() result of the loop (NameError on empty input); return
        # the collected pin paths instead.
        return self.pins_urls

    def get_pics_pins(self):
        """Visit every collected pin page and download its main image."""
        for url in self.pins_urls:
            pin_url = self.baseurl + url
            req = urllib.request.Request(pin_url)
            with urllib.request.urlopen(req) as resp:
                soup = BeautifulSoup(resp.read(), 'html.parser')
            t_imgs_div = soup.find(attrs={'class': 'main-image'})

            imgs_urls = []
            try:
                img = t_imgs_div.div.a.img
                if img is None:
                    img = t_imgs_div.div.img
                if img.has_attr('src'):
                    src = img['src']
                    # BUG FIX: str.find() returns -1 (truthy!) when the
                    # substring is absent, so the original condition was
                    # true for nearly every src.  Test presence explicitly.
                    if src.find("hbimg.b0.upaiyun") != -1:
                        # src is protocol-relative, presumably of the form
                        # "//hbimg.b0.upaiyun.com/<name>": [2:] drops the
                        # "//" for the URL, [23:] drops the host prefix
                        # for the file name — TODO confirm against a live
                        # page.
                        imgs_urls.append({
                            "url": src[2:],
                            "name": src[23:],
                        })
            except Exception as err:
                # Best-effort crawl: a pin page without the expected
                # structure is reported and skipped, not fatal.
                print(err)

            self.get_pics(imgs_urls)

    def catch(self, url):
        """Crawl one page: fetch it, harvest links, download pin images."""
        print("goto cache url: ", url)
        resp = self.get_content(url)
        print("Get contents response")
        self.get_urls(resp)
        self.get_pics_pins()

    def get_imgs_boards(self):
        """Run catch() on every discovered board (1-based progress log)."""
        for indx, url in enumerate(self.boards_urls, start=1):
            print("Now goto get boards index ", indx)
            self.catch(self.baseurl + url)

    def save_pics(self, url, filename):
        """Download *url* into *filename*; silently skip too-short URLs."""
        if len(url) < 8:  # shorter than "http://x" cannot be a real URL
            return
        print("Go to save with name ", filename, ", with url: ", url)
        try:
            # Context managers close both the socket and the file even on
            # error (the original leaked both handles on exceptions).
            with urllib.request.urlopen(url) as u:
                data = u.read()
            with open(filename, "wb") as f:
                f.write(data)
        except Exception as err:
            # Best-effort: one failed download must not stop the crawl.
            print(err)

    def get_pics(self, pics):
        """Save each {"url": ..., "name": ...} entry under ./new/.

        NOTE(review): assumes the ./new/ directory already exists —
        confirm, or create it before crawling.
        """
        for pic in pics:
            self.save_pics("http://" + pic["url"], "./new/" + pic["name"] + ".jpg")

if __name__ == "__main__":
    # Entry point: crawl the seed page, then every board it links to.
    crawler = CatchPics()
    crawler.catch("http://huaban.com/favorite/beauty")
    crawler.get_imgs_boards()