#coding:UTF8
from bs4 import BeautifulSoup
from os.path import os
import time
import re
import urlparse
import urllib2
import requests
from gettext import gettext

'''
下载网页
'''
def download_html(url):
    """Fetch *url* and return the response body (bytes), or None on failure.

    Returns None when *url* is None or the HTTP status is not 200.
    The response object is always closed, even on the non-200 path —
    the original leaked the connection there.
    """
    if url is None:
        return None
    response = urllib2.urlopen(url)
    try:
        if response.getcode() != 200:
            return None
        return response.read()
    finally:
        # release the underlying socket in every case
        response.close()
    

'''
获取相册链接
'''
def get_albums_url(root_url):
    """Return the set of album links (hrefs containing ``/mm/``) found on *root_url*.

    The page is fetched via download_html and parsed with BeautifulSoup;
    duplicate hrefs collapse because the result is a set.
    """
    page_source = download_html(root_url)
    soup = BeautifulSoup(page_source, 'html.parser', from_encoding='utf-8')
    anchors = soup.find_all('a', href=re.compile(r"/mm/"))
    return set(anchor['href'] for anchor in anchors)

'''
获取图片链接
'''
def get_picture_urls(root_url):
    """Build the list of pictures for the album at *root_url*.

    Each element is a dict with:
      'url'  -- direct image URL on img.mmjpg.com
      'name' -- local filename ("<album_id>-<n>.jpg")

    The picture count is taken from the second-to-last pagination link
    on the album page.  NOTE(review): the image path hard-codes the year
    segment ``2017/`` -- presumably matches the site layout at the time;
    confirm against current pages.
    """
    prefix_length = len('http://www.mmjpg.com/mm/')
    album_id = root_url[prefix_length:]
    page_source = download_html(root_url)
    soup = BeautifulSoup(page_source, 'html.parser', from_encoding='utf-8')
    # pagination links; the last one is "next", so [-2] holds the max page number
    page_links = soup.find('div', class_="page").find_all('a', href=re.compile(r"/mm/"))
    last_picture = int(page_links[-2].get_text())
    # image URLs look like http://img.mmjpg.com/2017/1012/1.jpg
    picture_urls = []
    for number in range(1, last_picture + 1):
        picture_urls.append({
            'url': 'http://img.mmjpg.com/2017/' + str(album_id) + '/' + str(number) + '.jpg',
            'name': str(album_id) + '-' + str(number) + '.jpg',
        })
    return picture_urls

'''
下载图片
'''
def download_pictures(root_urls):
    count = 0
    for url in root_urls:              
        pic= requests.get(url['url'], timeout=10)
        # 下载图片
        fp = open(url['name'],'wb')
        fp.write(pic.content)
        fp.close()
        
        count = count + 1
        print "下载第" + str(count) + "个  ： " +  url['url']
        

'''
主窗口
'''
if __name__ == "__main__":
    
    index = 3
    while index <= 4: #69
        root_url = "http://www.mmjpg.com/home/" + str(index)        
        albums_urls = get_albums_url(root_url)
        for albums_url in albums_urls:
            picture_urls = get_picture_urls(albums_url)
            download_pictures(picture_urls)
        index = index + 1
