#!/usr/bin/env python
# -*- coding: utf-8 -*-

'''
Created on 2011-9-19

@author: zhongfeng
'''
import sys,os
from dbproc.basedbproc import *

def createSiteCat(urls, siteName, catKeyFunc=getCatKey,saveFlag = True):
    siteId = getSiteIdByName(siteName)
    for urlsum in urls:
        parent = urlsum.parentPath
        path = []
        for pUrl in parent:
            rawCatId = catKeyFunc(pUrl.url)
            if pUrl.catagoryLevel == 0:
                id0 = getCatIdFromRawInfo(siteId, catUrl=pUrl.url)
                if id0 is None :
                    if saveFlag:
                        id0 = saveProdCat(rawCatId=rawCatId, siteId=siteId, parentId=0, url=pUrl.url,
                                         name=pUrl.name, parentPath=[0], level=pUrl.catagoryLevel)
                        print 'new cat :id is id0 %s,name:%s,url:%s,level:%s' % (id0,pUrl.name,pUrl.url,pUrl.catagoryLevel)
                    else:
                        print 'new cat :name:%s,url:%s,level:%s' % (pUrl.name,pUrl.url,pUrl.catagoryLevel)
                path.append(id0)
            elif pUrl.catagoryLevel == 1:
                id1 = getCatIdFromRawInfo(siteId, catUrl=pUrl.url)
                if id1 is None:
                    if saveFlag:
                        parentPath1 = [id0]
                        id1 = saveProdCat(rawCatId=rawCatId, siteId=siteId, parentId=id0, url=pUrl.url,
                                         name=pUrl.name, parentPath=parentPath1, level=pUrl.catagoryLevel)
                        print 'new cat :id is id1 %s,name:%s,url:%s,level:%s' % (id1,pUrl.name,pUrl.url,pUrl.catagoryLevel)
                    else:
                        print 'new cat :name:%s,url:%s,level:%s' % (pUrl.name,pUrl.url,pUrl.catagoryLevel)
                path.append(id1)
            elif pUrl.catagoryLevel == 2:
                id2 = getCatIdFromRawInfo(siteId, catUrl=pUrl.url)
                if id2 is None:
                    if saveFlag:
                        parentPath2 = [id0, id1]
                        id2 = saveProdCat(rawCatId=rawCatId, siteId=siteId, parentId=id1, url=pUrl.url,
                                         name=pUrl.name, parentPath=parentPath2, level=pUrl.catagoryLevel)
                        print 'new cat :id is id0 %s,name:%s,url:%s,level:%s' % (id2,pUrl.name,pUrl.url,pUrl.catagoryLevel)
                    else:
                        print 'new cat :name:%s,url:%s,level:%s' % (pUrl.name,pUrl.url,pUrl.catagoryLevel)
                path.append(id2)
                
        rawCatId = catKeyFunc(urlsum.url)
        id3 = getCatIdFromRawInfo(siteId, catUrl=urlsum.url)
        if id3 is None:
            if saveFlag:
                parentPath3 = path
                id3 = saveProdCat(rawCatId=rawCatId, siteId=siteId, parentId=parentPath3[-1], url=urlsum.url,
                                name=urlsum.name, parentPath=parentPath3, level=urlsum.catagoryLevel)
                print 'new cat :id is id0 %s,name:%s,url:%s,level:%s' % (id3,urlsum.name,urlsum.ur,urlsum.catagoryLevel)
            else:
                print 'new cat :name:%s,url:%s,level:%s' % (urlsum.name,urlsum.url,urlsum.catagoryLevel)

def getAllCatUrlSums(rootUrlSum, ParserClass, content = None,include = None,exclude = None):
    """Return the sub-category url summaries parsed from a category page.

    When `content` is None the page is crawled (retrying until HTTP 200);
    otherwise the supplied page content is parsed directly.

    Bug fix: `content = result.content` previously executed unconditionally,
    raising NameError on `result` (and discarding the caller's content)
    whenever `content` was supplied. It now only runs after a crawl.

    :param rootUrlSum: url summary of the page to parse (and crawl if needed)
    :param ParserClass: parser constructed as ParserClass(content, rootUrlSum,
        include, exclude); must expose parserSubUrlSums()
    :param content: pre-fetched page content, or None to crawl
    :param include: optional whitelist passed through to the parser
    :param exclude: optional blacklist passed through to the parser
    """
    if content is None:
        from crawlerhttp import crawle
        # Retry until the crawl succeeds; only a 200 response is accepted.
        while True:
            result = crawle(rootUrlSum)
            if result.code == 200:
                break
        content = result.content
    firstPage = ParserClass(content, rootUrlSum, include, exclude)
    return firstPage.parserSubUrlSums()

def calEveryLevelCatNum(urlSums):
    s0 = set()
    s1 = set()
    s2 = set()
    s3 = set()
    for sort_3 in urlSums:
        print sort_3.name,sort_3.url
        print seEncode(match55bigoCats(8,sort_3.name))
        parentPath = sort_3.parentPath
        s0.add(parentPath[0].url)
        s1.add(parentPath[1].url)
        if len(parentPath) > 2:
            s2.add(parentPath[2].url)
        s3.add(sort_3.url)
    sa = set()
    import itertools
    for t in itertools.chain(s0, s1, s2, s3):
        sa.add(str(getMd5Key(t))[0:16])
    print len(sa)
    print len(s0), len(s1), len(s2), len(s3)

def testSiteCat(rootUrlSum, ParserClass, content=None, updateDb=False):
    """Parse a site's categories, print level statistics, optionally persist.

    :param updateDb: when True, also write the categories via createSiteCat.
    """
    allSums = getAllCatUrlSums(rootUrlSum, ParserClass, content)
    calEveryLevelCatNum(allSums)
    if not updateDb:
        return
    createSiteCat(allSums, rootUrlSum.name)
        
def preProcCats(rootUrlSum,ParserClass,content = None):
    """Report how a site's freshly-crawled categories line up with stored ones.

    Crawls/parses the site's current category list, skips categories already
    recorded for this site, and fuzzy-matches the rest via match55bigoCats.
    Decided matches are printed inline; afterwards the unmatched categories
    are printed, then the ambiguous ("undecided") ones. All output to stdout.
    """
    urlSums = getAllCatUrlSums(rootUrlSum, ParserClass, content)  # the site's current full category list
    siteId = getSiteIdByName(rootUrlSum.name)
    noMatch = []    # categories with no match candidates at all
    unDecided = []  # categories with candidates but no conclusive match
    for sort_3 in urlSums:
        id3 = getCatIdFromRawInfo(siteId, catUrl=sort_3.url)
        if id3 is not None:
            continue  # already stored for this site; nothing to report
        retArr = match55bigoCats(siteId,sort_3.name)
        if len(retArr) == 0:
            noMatch.append( '|'.join((sort_3.name,sort_3.url)))
            continue
        auRet = []  # ambiguous candidates (other sites, different names)
        for t in retArr:
            # NOTE(review): `id` shadows the builtin here; kept as-is.
            id,site_id,self_cat_id,name,url ,cat_base_id = t
            flag = 0
            if site_id == siteId:
                # Same-site candidate decides the match; flag=1 only on exact URL.
                if url == sort_3.url:
                    flag = 1
                print '|'.join((sort_3.name,sort_3.url,name,str(id),str(flag),str(site_id),str(self_cat_id),str(cat_base_id)))
                break
            elif name == sort_3.name:
                # Exact name match on another site also counts as decided.
                print '|'.join((sort_3.name,sort_3.url,name,str(id),str(flag),str(site_id),str(self_cat_id),str(cat_base_id)))
                break
            else:
                auRet.append( '(%s,%s)' % (str(self_cat_id),name))
        if len(auRet) > 0:
            # NOTE(review): name/id/flag/site_id/... below hold values from the
            # LAST candidate iterated above (or the one broken on) — looks
            # intentional as a summary row but worth confirming.
            unDecided.append( '|'.join((sort_3.name,sort_3.url,name,str(id),str(flag),str(site_id),
                                        str(self_cat_id),str(cat_base_id), seEncode(auRet))))
    for newCat in noMatch:
        print newCat
    for unDeCat in unDecided:
        print unDeCat
                
# --- per-site build entry points ---
from pageparser import getParser

def __buildCatagory(parserDict, root, content=None):
    """Run category pre-processing for one site via its level-0 parser.

    Bug fix: the caller-supplied `content` is now forwarded to preProcCats.
    Previously a literal None was always passed, silently discarding the
    pre-loaded page content from callers such as buildIcsonCat.

    :param parserDict: per-site parser registry, indexed by level
    :param root: the site's root url summary
    :param content: optional pre-fetched page content (None => crawl)
    """
    parserClass = getParser(0, parserDict)
    preProcCats(root, parserClass, content)

def build360BuyCat():
    """Build/inspect categories for the 360buy site."""
    from j360buy import j360pageparser
    __buildCatagory(j360pageparser.parserDict, j360pageparser.j360buyRoot)

def buildEfeihuCat():
    """Build/inspect categories for the efeihu site."""
    from efeihu import efeihupageparser
    __buildCatagory(efeihupageparser.parserDict, efeihupageparser.efeihuRoot)

def buildLusenCat():
    """Build/inspect categories for the lusen site."""
    from lusen import lusenpageparser
    __buildCatagory(lusenpageparser.parserDict, lusenpageparser.lusenRoot)

def buildGomeCat():
    """Build/inspect categories for the gome site."""
    from gome import gomepageparser
    __buildCatagory(gomepageparser.parserDict, gomepageparser.gomeRoot)

def buildDangDangCat():
    """Build/inspect categories for the dangdang site."""
    from dangdang import dangpageparser
    __buildCatagory(dangpageparser.parserDict, dangpageparser.dangdangRoot)

def buildNewEggCat():
    """Build/inspect categories for the newegg site."""
    from newegg import neweggpageparser
    __buildCatagory(neweggpageparser.parserDict, neweggpageparser.newEggRoot)
   
def buildSuningCat():
    """Build/inspect categories for the suning site."""
    from suning import suningparser
    __buildCatagory(suningparser.parserDict, suningparser.sunningRoot)

def buildIcsonCat():
    """Build/inspect categories for icson from a locally saved portal page.

    Reads 'portal.html' from the script's own directory and hands its
    content to the category builder instead of crawling.
    """
    from icson import icsonpageparser
    scriptDir = os.path.abspath(os.path.dirname(sys.argv[0]))
    portalFile = os.path.join(scriptDir, 'portal.html')
    fInput = open(portalFile, 'r')
    try:
        pageContent = fInput.read()
    finally:
        fInput.close()
    __buildCatagory(icsonpageparser.parserDict, icsonpageparser.icsonRoot, pageContent)
    
def buildCoo8Cat():
    """Build/inspect categories for the coo8 site (crawls the live page;
    a disabled read-from-local-file variant was removed as dead code)."""
    from coo8 import coo8pageparser
    __buildCatagory(coo8pageparser.parserDict, coo8pageparser.coo8Root)

def buildAmazonCat():
    from amazon.amazonpageparser import rootUrlSummary,parserDict
    from pageparser import ObuyUrlSummary
    parserClass = getParser(0, parserDict)
    include = [ObuyUrlSummary(name=name) for name in [u'home-appliances']]
    exclude = [ObuyUrlSummary(name=name) for name in [u'video', u'aps', u'stripbooks', u'music', u'apparel', u'electronics', u'audio-visual-education']]
    urlSumsSort1 = getAllCatUrlSums(rootUrlSum=rootUrlSummary,ParserClass=parserClass,exclude=exclude)
    ret = []
    for sort1 in urlSumsSort1:
        print sort1.url
        parserClass1 = getParser(1, parserDict)
        urlSumsSort2 = getAllCatUrlSums(sort1, parserClass1)
        ret.extend(urlSumsSort2)
    calEveryLevelCatNum(ret)
    createSiteCat(ret,rootUrlSummary.name)
    
def buildAmazonCat_New():
    from amazon.amazonpageparser import rootUrlSummary,parserDict
    from pageparser import ObuyUrlSummary
    parserClass = getParser(0, parserDict)
    include = [ObuyUrlSummary(name=name) for name in [u'appliances',u'communications',u'audio-visual',u'computers',u'office-products',
                                                                u'home-appliances',u'photo-video',u'music-players',u'automotive',u'software']]
    urlSumsSort1 = getAllCatUrlSums(rootUrlSum=rootUrlSummary,ParserClass=parserClass,include=include)
    ret = []
    for sort1 in urlSumsSort1:
        print sort1.name
        parserClass1 = getParser(1, parserDict)
        urlSumsSort2 = getAllCatUrlSums(sort1, parserClass1)
        for sort2 in urlSumsSort2:
            print '    %s' % sort2.name
            parserClass2 = getParser(2, parserDict)
            urlSumsSort3 = getAllCatUrlSums(sort2, parserClass2)
            if not urlSumsSort3:
                sort2.catagoryLevel = 3
                ret.append(sort2)
            else:
                for sort3 in urlSumsSort3:
                    print '        %s' % sort3.name
                ret.extend(urlSumsSort3)
    calEveryLevelCatNum(ret)

    createSiteCat(ret,rootUrlSummary.name)
  

def buildAllCat():
    """Build/inspect categories for every supported site, in order."""
    builders = (build360BuyCat, buildGomeCat, buildDangDangCat,
                buildNewEggCat, buildSuningCat, buildIcsonCat, buildCoo8Cat)
    for builder in builders:
        builder()
    
if __name__ == '__main__':
    # Ad-hoc entry point: earlier one-off runs are kept commented out for
    # reference; currently only the coo8 build is executed.
    #from gome.gomepageparser import parserDict,gomeRoot
    #parserClass = getParser(0, parserDict)
    #urlSums = getAllCatUrlSums(rootUrlSum = gomeRoot, ParserClass=parserClass, content = None)
    #createSiteCat(urls = urlSums, siteName = gomeRoot.name, saveFlag = False)
    #build360BuyCat()
    #buildGomeCat()
    #buildSuningCat()
    #buildAmazonCat_New()
    #buildEfeihuCat()
    #buildLusenCat()
    buildCoo8Cat()
    
    
    