# -*- coding: utf-8 -*-

'''
author: Cooper
date: 2012-3-30
Crawl and organize the Taobao API documentation index and related data.
(original: 整理淘宝API文档目录及相关资料)
'''

import MyPyLib
from BeautifulSoup import BeautifulSoup
import re
import MySQLdb

# Local filesystem/console encoding, used to re-encode utf-8 text for printing.
_coding = __import__('sys').getfilesystemencoding()

# Entry page listing every API documentation category.
_APIDocIndexUrl = ('http://open.taobao.com/'
                   'doc/category_list.htm?id=102')

def getConn():
    '''Open a MySQL connection to the local TAOBAOAPI database (utf8 charset).'''
    conn = MySQLdb.connect(host="localhost",
                           user="root",
                           passwd="LTS98",
                           db="TAOBAOAPI",
                           charset="utf8")
    return conn

def generateSQLCmd(table,fl):
    '''
    Build an INSERT IGNORE statement for `table` from the field list `fl`.

    Each field has single quotes replaced by double quotes (crude escaping
    for embedding in a single-quoted SQL literal) and '&nbsp;' entities
    replaced by plain spaces.  A NOW() timestamp column is appended.

    Fix: the caller's list is no longer mutated in place (the old version
    rewrote `fl` items, clobbering values the caller used afterwards).

    NOTE(review): SQL is still built by string interpolation; acceptable for
    this trusted crawler, but parameterized queries would be safer.
    '''
    # build a cleaned copy instead of rewriting fl in place
    cleaned=[t.replace('\'','"').replace('&nbsp;',' ') for t in fl]
    s=','.join(['\'%s\''%t for t in cleaned])
    return 'INSERT IGNORE INTO %s VALUES(%s,NOW())'%(table,s)

def getCatAPIInfo():
    '''
    Fetch the API category index page and return a dict mapping
    category name -> [description, link] (all utf-8 byte strings).
    '''
    result={}
    page=MyPyLib.httpRequest(_APIDocIndexUrl,0)
    soup=BeautifulSoup(page,fromEncoding='utf-8')
    content=soup.find('div',{'class':'contentlist-api'})
    for item in content.ul.findAll('li'):
        name=item.a.text.encode('utf-8')
        result[name]=[item.p.text.encode('utf-8'),
                      item.a['href'].encode('utf-8')]
    return result

def saveCatAPIInfo():
    '''
    Refresh the `catagory` table: truncate it, then insert one row per
    API category fetched from the index page.

    Fix: the connection is now closed in a finally block so it is released
    even when a query or the HTTP fetch raises.
    '''
    db=getConn()
    try:
        db.query('TRUNCATE TABLE catagory')
        db.commit()
        d=getCatAPIInfo()
        for name in d:
            db.query(generateSQLCmd('catagory',[name]+d[name]))
        db.commit()
    finally:
        db.close()

def getDataStructList(soup):
    '''
    Extract the "data structure list" section from a category page.

    Returns a list of [apititle, datastruct, desc, link] rows
    (utf-8 byte strings); empty list when the section is absent.

    Fix: the inner list comprehension reused `t` as its variable, which in
    Python 2 leaks into the enclosing scope and clobbered the outer `t`;
    renamed to avoid the shadowing.  Unused enumerate index removed.
    '''
    section=None
    for div in soup.findAll('div',{'class':'sub-wrap'}):
        # section heading text is the Chinese for "data structure list"
        if div.h3.text.encode('utf-8')=='数据结构列表':
            section=div
            break
    rl=list()
    if section:
        apititle=soup.find('h1',{'class':'title'}).text
        for p in section.div.findAll('p'):
            lt=[apititle,p.a.text,None,p.a['href']]
            # the last <span> in the row holds the description
            lt[2]=p.findAll('span')[-1].text
            rl.append([field.encode('utf-8') for field in lt])
    return rl

def getDataStructListTest():
    '''Print the data-structure list of the first category (smoke test).'''
    cats=getCatAPIInfo()
    for name in cats.keys()[:1]:
        page=MyPyLib.httpRequest(cats[name][1],1)
        soup=BeautifulSoup(page,fromEncoding='utf-8')
        for row in getDataStructList(soup):
            print (','.join(row)).decode('utf-8').encode(_coding)

def getAPIsList(soup):
    '''
    Extract the "API list" section from a category page.

    Returns a list of [apititle, api, open-bg, desc, link] rows
    (utf-8 byte strings); empty list when the section is absent.

    Fix: the inner list comprehension reused `t` as its variable, which in
    Python 2 leaks into the enclosing scope and clobbered the outer `t`;
    renamed to avoid the shadowing.  Unused enumerate index removed.
    '''
    section=None
    for div in soup.findAll('div',{'class':'sub-wrap'}):
        # section heading text is the Chinese for "API list"
        if div.h3.text.encode('utf-8')=='API列表':
            section=div
            break
    rl=list()
    if section:
        apititle=soup.find('h1',{'class':'title'}).text
        for p in section.div.findAll('p'):
            # the <s> tag's class attribute carries the open/beta flag
            lt=[apititle,p.a.text,p.s['class'],None,p.a['href']]
            lt[3]=p.findAll('span')[-1].text
            rl.append([field.encode('utf-8') for field in lt])
    return rl

def getAPIsListTest():
    '''Print the API list of the second category (smoke test).'''
    cats=getCatAPIInfo()
    for name in cats.keys()[1:2]:
        page=MyPyLib.httpRequest(cats[name][1],1)
        soup=BeautifulSoup(page,fromEncoding='utf-8')
        for row in getAPIsList(soup):
            print (','.join(row)).decode('utf-8').encode(_coding)

def getDataStruct(url):
    '''
    Fetch one data-structure page and return a dict
    {dataclassname: fieldrows}, each row being
    [dataclassname, desc, link, field, index, type, private, sample, fielddesc]
    (utf-8 byte strings).

    Recurses into any data class linked from the second column whose name
    has not been collected yet.

    Fixes: `row.findAll('td')` was evaluated three times per row (now
    hoisted); a dead local list that was built but never used is removed.
    '''
    s=MyPyLib.httpRequest(url,1)
    soup=BeautifulSoup(s,fromEncoding='utf-8')
    rd=dict()
    dataname=soup.find('h1',{'class':'title'}).text
    desc=soup.find('p',{'class':'introduction'}).text
    for i,row in enumerate(soup.find('table',{'class':'api-table'}).tbody.findAll('tr')):
        tds=row.findAll('td')  # hoisted: previously queried three times
        lt=[dataname,desc,url]+[td.text for td in tds]
        lt=[field.encode('utf-8') for field in lt]
        lt.insert(4,str(i+1))  # 1-based field index
        rd.setdefault(dataname,[]).append(lt)
        # the second cell may link to a nested data class; follow it once
        if tds[1].a:
            nested=re.match(r'^\S+',tds[1].text).group()
            if not rd.has_key(nested):
                rlt=getDataStruct('http://api.taobao.com/apidoc/'+tds[1].a['href'])
                rd.update(rlt)
    return rd

def getDataStructTest():
    '''Print every data-structure row of the third category (smoke test).'''
    cats=getCatAPIInfo()
    for name in cats.keys()[2:3]:
        page=MyPyLib.httpRequest(cats[name][1],1)
        soup=BeautifulSoup(page,fromEncoding='utf-8')
        for entry in getDataStructList(soup):
            structs=getDataStruct(entry[3])
            for key in structs:
                for row in structs[key]:
                    print (','.join(row)).decode('utf-8').encode(_coding)

def getAPI(soup):
    '''
    Parse one API detail page.

    Returns [api, info, desc, auth_type, apitesturl] as utf-8 byte strings.
    The page title has the form "<api.name> <short info>".

    Fix: the named group was declared but then fetched via groups()[0];
    use group('info') directly.
    '''
    title=soup.find('div',{'class':'title-wrap'}).h1.text
    api=re.match(r'^\S+',title).group()
    info=re.match(r'^\S+\s(?P<info>\S+)$',title).group('info')
    desc=soup.find('p',{'class':'title-intro'}).text
    auth_type=soup.find('h2',id='authorize').findNext('p').text
    apitesturl=soup.find('a',{'class':'tool-test'})['href']
    apitesturl='http://api.taobao.com'+apitesturl
    return [t.encode('utf-8') for t in [api,info,desc,auth_type,apitesturl]]

def getAPITest():
    '''Fetch the first API of the first category and print its summary.'''
    cats=getCatAPIInfo()
    for name in cats.keys()[:1]:
        page=MyPyLib.httpRequest(cats[name][1],1)
        catsoup=BeautifulSoup(page,fromEncoding='utf-8')
        for entry in getAPIsList(catsoup)[:1]:
            detail=MyPyLib.httpRequest(entry[4],1)
            apisoup=BeautifulSoup(detail,fromEncoding='utf-8')
            print ('\n'.join(getAPI(apisoup))).decode('utf-8').encode(_coding)

def getReqResFormat(soup):
    '''
    Collect all parameter rows of one API page:
    [api, leveltype, field, type, required, sample, default, desc]
    where leveltype is sys_params, app_params or res_params.
    '''
    rows=getSysParams(soup)
    rows=rows+getAppParams(soup)
    rows=rows+getResFromat(soup)
    return rows

def getReqResFormatTest():
    '''Print the parameter rows of the first API of the first category.'''
    cats=getCatAPIInfo()
    for name in cats.keys()[:1]:
        page=MyPyLib.httpRequest(cats[name][1],1)
        catsoup=BeautifulSoup(page,fromEncoding='utf-8')
        for entry in getAPIsList(catsoup)[:1]:
            detail=MyPyLib.httpRequest(entry[4],1)
            apisoup=BeautifulSoup(detail,fromEncoding='utf-8')
            for row in getReqResFormat(apisoup):
                print ('\n'.join(row)).decode('utf-8').encode(_coding)

def getSysParams(soup):
    '''
    Rows of the system-parameter table (first api-table on the page),
    tagged with leveltype 'sys_params', as utf-8 byte strings.
    '''
    title=soup.find('div',{'class':'title-wrap'}).h1.text
    api=re.match(r'^\S+',title).group()
    table=soup.find('table',{'class':'api-table'})
    rows=list()
    for tr in table.tbody.findAll('tr'):
        lt=[api,'sys_params']+[td.text for td in tr.findAll('td')]
        # pad the two columns this table lacks so all levels share a layout
        lt.insert(5,'')
        lt.insert(6,'')
        rows.append([field.encode('utf-8') for field in lt])
    return rows

def getAppParams(soup):
    '''
    Rows of the application-parameter table (second api-table on the page),
    tagged with leveltype 'app_params', as utf-8 byte strings.
    '''
    title=soup.find('div',{'class':'title-wrap'}).h1.text
    api=re.match(r'^\S+',title).group()
    table=soup.findAll('table',{'class':'api-table'})[1]
    rows=list()
    for tr in table.tbody.findAll('tr'):
        lt=[api,'app_params']+[td.text for td in tr.findAll('td')]
        rows.append([field.encode('utf-8') for field in lt])
    return rows

def getResFromat(soup):
    '''
    Rows of the response-field table (third api-table on the page),
    tagged with leveltype 'res_params', as utf-8 byte strings.

    NOTE: the name keeps its historical misspelling ("Fromat") because
    callers reference it.
    '''
    title=soup.find('div',{'class':'title-wrap'}).h1.text
    api=re.match(r'^\S+',title).group()
    table=soup.findAll('table',{'class':'api-table'})[2]
    rows=list()
    for tr in table.tbody.findAll('tr'):
        lt=[api,'res_params']+[td.text for td in tr.findAll('td')]
        # pad the missing column so all levels share one layout
        lt.insert(6,'')
        rows.append([field.encode('utf-8') for field in lt])
    return rows

def getErrorCode(soup):
    '''
    Rows of the error-code table: [api, errorcode, errordesc, solution]
    as utf-8 byte strings.  The header row is skipped.
    '''
    title=soup.find('div',{'class':'title-wrap'}).h1.text
    api=re.match(r'^\S+',title).group()
    table=soup.find('h2',id='error-code').findNext('table')
    rows=list()
    for tr in table.findAll('tr')[1:]:
        lt=[api]+[td.text for td in tr.findAll('td')]
        rows.append([field.encode('utf-8') for field in lt])
    return rows

def getErrorCodeTest():
    '''Print the error codes of the first API of the first category.'''
    cats=getCatAPIInfo()
    for name in cats.keys()[:1]:
        page=MyPyLib.httpRequest(cats[name][1],1)
        catsoup=BeautifulSoup(page,fromEncoding='utf-8')
        for entry in getAPIsList(catsoup)[:1]:
            detail=MyPyLib.httpRequest(entry[4],1)
            apisoup=BeautifulSoup(detail,fromEncoding='utf-8')
            for row in getErrorCode(apisoup):
                print (','.join(row)).decode('utf-8').encode(_coding)

def getFAQ(soup):
    '''
    FAQ rows for one API page: [api, question, answer] (utf-8 byte strings).

    Question rows carry class "odd"; the matching "even" row holds the
    answer and is appended to the entry started by the preceding question.
    '''
    rows=list()
    title=soup.find('div',{'class':'title-wrap'}).h1.text
    api=re.match(r'^\S+',title).group()
    table=soup.find('h2',id='FAQ').findNext('table')
    if table:
        for tr in table.findAll('tr'):
            cls=tr['class']
            if cls=='odd':
                rows.append([api.encode('utf-8'),tr.td.text.encode('utf-8')])
            elif cls=='even':
                rows[-1].append(tr.td.text.encode('utf-8'))
    return rows

def getFAQTest():
    '''Print the FAQ of the twelfth API of the second category (smoke test).'''
    cats=getCatAPIInfo()
    for name in cats.keys()[1:2]:
        page=MyPyLib.httpRequest(cats[name][1],1)
        catsoup=BeautifulSoup(page,fromEncoding='utf-8')
        for entry in getAPIsList(catsoup)[11:12]:
            detail=MyPyLib.httpRequest(entry[4],1)
            apisoup=BeautifulSoup(detail,fromEncoding='utf-8')
            for row in getFAQ(apisoup):
                print (','.join(row)).decode('utf-8').encode(_coding)

def run():
    '''
    Full crawl of the Taobao API documentation: truncate every target
    table, then walk all categories and persist, in order:
    1.catagory info
    2.DS list
    3.API list
    4.DS class
    5.API info
    6.ReqResFormat
    7.Error Code
    8.FAQ
    '''
    db=getConn()
    #catagory info
    # start from a clean slate: every run rebuilds all tables in full
    db.query('TRUNCATE TABLE catagory')
    db.query('TRUNCATE TABLE apidslist')
    db.query('TRUNCATE TABLE apilist')
    db.query('TRUNCATE TABLE apidatastruct')
    db.query('TRUNCATE TABLE api')
    db.query('TRUNCATE TABLE reqresformat')
    db.query('TRUNCATE TABLE errorcode')
    db.query('TRUNCATE TABLE faq')
    db.commit()
    d=getCatAPIInfo()
    for t  in d:
        cmdStr=generateSQLCmd('catagory',[t]+d[t])
        db.query(cmdStr)
    db.commit()
    print 'success to save catagroy API data'
    
    for cat in d.keys()[:]:
        # progress output, re-encoded from utf-8 to the console encoding
        print cat.decode('utf-8').encode(_coding),\
        d[cat][0].decode('utf-8').encode(_coding)
        catUrl=d[cat][1]
        s=MyPyLib.httpRequest(catUrl,1)
        soup=BeautifulSoup(s,fromEncoding='utf-8')
        for l in getDataStructList(soup):
            print l[1].decode('utf-8').encode(_coding)
            #DS list
            cmdStr=generateSQLCmd('apidslist',l)
            db.query(cmdStr)
            #DS class
            # NOTE(review): check whether generateSQLCmd rewrites l in place
            # before l[3] is read here -- verify the link is still raw
            url=l[3]
            dsClass=getDataStruct(url)
            for t in dsClass:
                for t2 in dsClass[t]:
                    cmdStr=generateSQLCmd('apidatastruct',t2)
                    db.query(cmdStr)
        db.commit()
        for l in getAPIsList(soup):
            print l[1].decode('utf-8').encode(_coding)
            #API list
            cmdStr=generateSQLCmd('apilist',l)
            db.query(cmdStr)
            url=l[-1]
            s=MyPyLib.httpRequest(url,1)
            # rebinding soup here is safe only because getAPIsList(soup)
            # above has already returned its full list
            soup=BeautifulSoup(s,fromEncoding='utf-8')
            #API
            cmdStr=generateSQLCmd('api',getAPI(soup))
            db.query(cmdStr)
            #ReqResFormat
            for t in getReqResFormat(soup):
                cmdStr=generateSQLCmd('reqresformat',t)
                db.query(cmdStr)
            #ERROR code
            for t in getErrorCode(soup):
                cmdStr=generateSQLCmd('errorcode',t)
                db.query(cmdStr)
            #FAQ
            for t in getFAQ(soup):
                cmdStr=generateSQLCmd('faq',t)
                db.query(cmdStr)
        db.commit()
    #db.commit()
    db.close()
    pass

if __name__=='__main__':
    #getAPIsListTest()
    #getDataStructListTest()
    run()
    print 'the end...'
    pass
