#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import urllib2
import xml.dom.minidom
import urlparse
import string
import re
import argparse

# Configuration defaults; overwritten by the command-line flags in __main__.
# (The original module-level ``global`` statements were no-ops and were removed.)
dl_mode = 1    # 0: do not download (only generate index), 1: download
debug_mode = 1 # 1: save server xml files under debug_dir
ignore_tab = set([u"每日最新",
u"最热套图",
])             # tab names skipped entirely (matched against <n> after strip)
try_sd = 1 # 1: if failed to download original img, will try thumbnail instead
#################################################
# Constants
the_host = "newxml.b0.upaiyun.com"  # xml/image server
debug_dir = "_DEBUG"                # where raw server xml responses are mirrored
#################################################
# Mutable module state
black_list = set()    # hosts that failed with the 403-via-302 redirect (full-size)
black_list_sd = set() # same, for the "!sd" thumbnail variant
last_err = 0          # set to 302 by SmartRedirectHandler when a 403 redirect is seen

class SmartRedirectHandler(urllib2.HTTPRedirectHandler):
    """Redirect handler that detects the server's 403-disguised-as-302 trick.

    The image host answers blocked requests with a 302 redirect whose target
    URL contains "403". This handler swallows such redirects and raises the
    module-level ``last_err`` flag so callers can blacklist the host.
    """

    def http_error_301(self, req, fp, code, msg, headers):
        # Follow permanent redirects normally, but record the status code
        # on the result for callers that want to inspect it.
        result = urllib2.HTTPRedirectHandler.http_error_301(
            self, req, fp, code, msg, headers)
        result.status = code
        return result

    def http_error_302(self, req, fp, code, msg, headers):
        #result = urllib2.HTTPRedirectHandler.http_error_302(
        #    self, req, fp, code, msg, headers)
        #result.status = code

        #url = req.get_full_url()
        #black_list.append(urlparse.urlparse(url)[1])
        global last_err
        # Flag the 403-style redirect for download_img's blacklist logic.
        # NOTE(review): assumes a Location header is always present on 302 —
        # ``headers.dict['location']`` raises KeyError otherwise; confirm.
        if headers.dict['location'].find('403') != -1:
            last_err = 302
        # Returning None aborts the redirect; urllib2 then raises HTTPError,
        # which getServerData reports as a failed connection (data = None).
        return


def getServerData(url):
    global last_err
    headers = {"User-Agent": "meinv6.0.2 qiu shou gou, zhi mai 502 wan ren min bi"}
    req = urllib2.Request(url, None,headers)
    red_handler = SmartRedirectHandler()
    opener = urllib2.build_opener(red_handler)
    last_err = 0
    try:
        r1 = opener.open(req,None,10)
        data = r1.read()
        r1.close()
    except IOError:
        print "Failed to connect:" + url
        data = None
    return data

def handleIndex(fhtml, dom1):
    """Render the complete index.html document from the parsed tab.xml DOM."""
    prologue = ("<!DOCTYPE html>\n<html>\n<head>"
                "<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\">"
                "<title>meitui</title></head>\n<body>\n")
    fhtml.write(prologue)
    handleHeader(fhtml, dom1.getElementsByTagName("up")[0])
    handleTabs(fhtml, dom1.getElementsByTagName("tabs")[0])
    fhtml.write("</body>\n</html>\n")

def getText(ele):
    """Return the character data of *ele*'s first child node."""
    first_child = ele.childNodes[0]
    return first_child.data

def utf8(x):
    """Coerce *x* to a UTF-8 encoded byte string."""
    text = unicode(x)
    return text.encode('utf-8')

def vldfilename(x):
    """Replace characters that are illegal in filenames with underscores."""
    illegal = re.compile(r'[?\\/*"<>|:]')
    return illegal.sub('_', x)

def handleHeader(fhtml, header):
    """Write the feed's update date and version as HTML paragraphs."""
    updated = utf8(getText(header.getElementsByTagName(u"d")[0]))
    version = utf8(getText(header.getElementsByTagName(u"v")[0]))
    fhtml.write("<p>Updated: " + updated + "</p>\n")
    fhtml.write("<p>Version: " + version + "</p>\n")

def handleTabs(fhtml, tabs):
    """Emit every <t> tab element found under *tabs*."""
    tab_elements = tabs.getElementsByTagName(u"t")
    for tab_element in tab_elements:
        handleTab(fhtml, tab_element)

def handleTab(fhtml, tab):
    """Process one tab: write its heading, then walk every album page.

    The server splits a tab into pages named <major>_<minor>.xml where
    ``minor`` runs from 1 to the value of the tab's <sb> element.
    """
    tab_name = vldfilename(string.strip(getText(tab.getElementsByTagName("n")[0])))
    if tab_name in ignore_tab: return
    fhtml.write("<h1>" + utf8(tab_name) + "</h1>\n")
    if dl_mode:
        try:
            if not os.path.isdir(tab_name):
                os.mkdir(tab_name)
        except OSError:
            print ("mkdir failed: " + tab_name)
            return
    major = getText(tab.getElementsByTagName("i")[0])
    minor_rng = int(getText(tab.getElementsByTagName(u"sb")[0]))
    for minor in range(1, minor_rng + 1):
        path = "/3/" + major + "_" + str(minor) + ".xml"
        data = getServerData("http://" + the_host + path)
        if data is None:  # BUG FIX: was ``== None``
            continue
        if debug_mode:
            # Mirror the raw server response for offline debugging.
            with open(debug_dir + os.sep + major + "_" + str(minor) + ".xml", 'wb') as file1:
                file1.write(data)
        dom1 = xml.dom.minidom.parseString(data)
        # Renamed from ``list`` — the original shadowed the builtin.
        album_list = dom1.getElementsByTagName("list")[0]
        for album in album_list.childNodes:
            handleSingleAlbum(fhtml, album, tab_name)

def handleSingleAlbum(fhtml,album, tab_name):
    global black_list
    global black_list_sd
    global try_sd
    if album.nodeType != xml.dom.Node.ELEMENT_NODE: return
    album_name = vldfilename(string.strip( getText(album.getElementsByTagName("i")[0])))
    fhtml.write("<h2>"+utf8(album_name)+"</h2>\n")
    if dl_mode:
        dir_dl = tab_name+os.sep+album_name
        try:
            if False == os.path.isdir(dir_dl):
                os.mkdir(dir_dl)
        except OSError:
            print ("mkdir failed: "+ dir_dl)
            return
    count = int(getText(album.getElementsByTagName("c")[0]))
    url_base = getText(album.getElementsByTagName("u")[0])
    for id in range(count):
        url_src = url_base + str(id) + ".JPG"
        url_src_sd = url_src + "!sd"
        href = "<a href=" +utf8(url_src)+ ">" + str(id) + "</a>\n"
        if dl_mode:
            file_dst = (dir_dl+os.sep+str(id)+".JPG")
            file_dst_sd = (dir_dl+os.sep+str(id)+"_sd.JPG")
            if False == os.path.isfile(file_dst):
                if download_img(url_src, file_dst, black_list):
                    href = "<a href=\"" +utf8(tab_name+os.sep+album_name)+os.sep+str(id)+".JPG\""+ ">" + str(id) + "</a>\n"
                    if True == os.path.isfile(file_dst_sd):
                        try:
                            os.remove(file_dst_sd)
                        except OSError:
                            print "delete failed: "+file_dst_sd
                elif try_sd:
                    if False == os.path.isfile(file_dst_sd):
                        if download_img(url_src_sd, file_dst_sd, black_list_sd):
                            href = "<a href=\"" +utf8(tab_name+os.sep+album_name)+os.sep+str(id)+"_sd.JPG\""+ ">" + str(id) + "</a>\n"
                    else:
                        href = "<a href=\"" +utf8(tab_name+os.sep+album_name)+os.sep+str(id)+"_sd.JPG\""+ ">" + str(id) + "</a>\n"
                        print "Skip: "+url_src_sd
            else:
                href = "<a href=\"" +utf8(tab_name+os.sep+album_name)+os.sep+str(id)+".JPG\""+ ">" + str(id) + "</a>\n"
                print "Skip: "+url_src
        fhtml.write(href)

def download_img(url_src,file_dst,b_list):
    if urlparse.urlparse(url_src)[1] not in b_list:
        print "Downloading: "+url_src
        img = getServerData(url_src)
        if img != None:
            try:
                with open(file_dst, 'wb') as file:
                    file.write(img)
                return True
            except IOError:
                print 'Failed to write: '+file_dst
                return False
        else:
            if last_err == 302:
                b_list.add(urlparse.urlparse(url_src)[1])
            return False
    else:
        print "Blacklist: "+url_src
        return False
##############################################################################

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Download meitui.mobi img.')
    parser.add_argument('--index_only', action='store_true',help='generate index.html only')
    parser.add_argument('--no_debug_mode', action='store_true',help='debug mode will store some temp file')
    parser.add_argument('--no_sd', action='store_true',help='sd will download thumbnail if download org img failed')
    args = parser.parse_args()
    debug_mode = (not args.no_debug_mode)
    try_sd     = (not args.no_sd)
    dl_mode    = (not args.index_only)


    data1 = getServerData("http://"+the_host+"/3/tab.xml")
    if data1==None:
        print("Failed to read tab.xml")
        exit
    if debug_mode:
        if False == os.path.isdir(debug_dir):
                os.mkdir(debug_dir)
        with open(debug_dir+os.sep+'tab.xml', 'wb') as file:
            file.write(data1)
    dom1 = xml.dom.minidom.parseString(data1)
    with open("index.html", "w") as fhtml:
        handleIndex(fhtml,dom1)
    print "Done"
