#coding=utf8
import urllib2, re, urlparse
import cmrhelper

''' pictures parser '''
'''format: var picurl="/ok-comic02/kt2/SuzumiyaHaruhi/vol_02/99770_001XXKwXuG.jpg"
var datas=93;var picurl="/ok-comic07/y/KAMIYOMI/vol_01/99770_001mjSfFFg.jpg";var picurl1="/ok-comic07/y/KAMIYOMI/vol_01/99770_002sEgAfDV.jpg";var ComicListID=28834;var ComicID=3739;
'''
# Page count: matches "var datas=93;" in the chapter page source (see header example).
pagenumre = re.compile(r'datas=(\d+);')
# Host id: trailing "s=<digits>" in a chapter url such as ".../?v=1*s=4".
pagere = re.compile(r'^http.*?s=(\d*\b)')
# re.compile(r'(^http.*)(\?v=)\d(\*s=(\d*\b))')
# Picture path: matches 'var picurl="..."' in the page's inline js.
picre = re.compile(r'var picurl=\"(.*?)\"')
# One "ServerList[N]=..." assignment statement inside the remote ove.js.
hostre = re.compile(r'(ServerList\[\d*\]=.*?);')
# urly code...
ServerList = ['']*20  # image-host base urls, indexed by server id from ove.js
HostID = 0            # current host id parsed from the chapter url (see pagere)

def getserverlist(url):
    """Fetch the site's ove.js and fill the global ServerList host table.

    The script contains assignments shaped like ``ServerList[0]='http://host/'``
    (matched by the module-level ``hostre``).

    url    -- any page url on the site; only its host part is used.
    Returns True when at least one ServerList entry was found, else False.
    """
    global ServerList
    url = urlparse.urljoin(url, '/haodm/dmjs/ove.js')
    content = urllib2.urlopen(url).read()
    entries = hostre.findall(content)
    if not entries:
        return False
    # SECURITY: the previous version exec()'d each assignment string fetched
    # from the network, running arbitrary remote code. The assignments have a
    # fixed shape, so parse the index and url explicitly instead.
    entryre = re.compile(r"ServerList\[(\d+)\]\s*=\s*['\"]([^'\"]*)['\"]")
    for item in entries:
        m = entryre.match(item)
        if m:
            idx = int(m.group(1))
            # ignore indices outside the pre-sized table rather than raising
            if 0 <= idx < len(ServerList):
                ServerList[idx] = m.group(2)
    return True

def getpageurls(url,content):
    "get each url of pages"
    '''urlformat: http://dm.99770.com/Comic/1274/12066/?v=1*s=4'''
    global HostID,ServerList
    pagelist = []
    if pagenumre.findall(content)[0].isdigit():
        pagelist.append(url)
        pagenum = int(pagenumre.findall(content)[0])
        if pagere.findall(url):
            HostID = int(pagere.findall(url)[0])
            print 'hostid: ',HostID
            for i in range(2,pagenum):
                pagelist.append(urlparse.urljoin(url,str(i) + '.htm?v=' + str(i) + 's=' + str(HostID)))
        # print '\n'.join(pagelist)
        return pagelist
    else:
        return None

def parsepic(url):
    """Download one chapter page and return its picture url.

    Joins the relative path captured by ``picre`` onto the image host chosen
    by the global HostID. Returns None for an empty page or when no
    'var picurl=...' assignment is present.
    """
    page = urllib2.urlopen(url).read()
    if not page:
        return None
    found = picre.findall(page)
    if found:
        # HostID is 1-based in the chapter urls; ServerList is 0-based.
        return urlparse.urljoin(ServerList[HostID - 1], found[0])

def getpiclist(url,title='unamed',mylog=None):
    "parser html source and js codes to generate piclist"
    # Returns a list of [picurl, picurl, title] triples, or None on error.
    global ServerList
    try:
        getserverlist(url)
        content = urllib2.urlopen(url).read()
        pagelist = getpageurls(url,content)
        if not pagelist:
            # getpageurls returns None when the page count cannot be parsed;
            # the original then crashed iterating None (TypeError).
            return []
        piclist = []
        for pageurl in pagelist:
            picurl = parsepic(pageurl)
            piclist.append([picurl,picurl,title])
        return piclist
    except Exception as e:  # boundary handler: log and fall through to None
        # Guard: mylog defaults to None and the original dereferenced it
        # unconditionally, raising AttributeError inside the handler.
        if mylog is not None:
            mylog.writeLog(str(e),'error')
