# -*- coding: utf-8 -*-
'''
author LongHoangGiang
created at Thu May 12 19:40
'''
import os, re, datetime, sys, time 
import workerpool, crawlerCommon, traceback
from MongoModel import MongoModel
from urlparse import urljoin

class MuaChung(MongoModel):
    """Crawler for the muachung.vn daily-deal (groupon-style) site.

    For each supported city, scrapes the listing page, downloads the
    thumbnail and detail images, and upserts one document per deal into
    the ``groupon.muachung`` MongoDB collection.

    NOTE(review): ``listProduct``/``content`` read the module-level global
    ``localFilePath`` (defined under ``__main__``) as the image save
    directory — importing this class without defining that global will
    raise NameError. TODO: pass the path in explicitly.
    """

    # URL slug -> human-readable city name (the name is stored verbatim
    # in each saved document under the 'city' key).
    cities = {
              'ha-noi': 'Hà Nội', 
              'tp-ho-chi-minh': 'TP Hồ Chí Minh', 
              'hai-phong': 'Hải Phòng', 
              'da-nang': 'Đà Nẵng'
    }

    def __init__(self, host='localhost', port=27017):
        MongoModel.__init__(self, host, port)

    def __del__(self):
        # BUGFIX: the original did ``del MuaChung.cities`` here, which
        # mutates shared *class* state — after the first instance was
        # finalized every remaining instance lost ``cities``, and a second
        # __del__ raised AttributeError. Only release base-class resources.
        MongoModel.__del__(self)

    def parseJsItem(self, jsStr):
        """Parse the inline ``shop.product.init(...)`` script of a deal.

        Example input:
        shop.product.init(444,1305169800,1305435540,423,700,'11h59 ngày 15/05/2011',140,'Phiếu',false,0);

        Returns a (currentBought, totalProduct, expiredDate) tuple, with
        the expiry reformatted as ``"%d/%m/%Y %H:%M:%S"``.

        Raises ValueError if the extracted date does not match the site's
        format; propagates whatever crawlerCommon.extractWithRegEx raises
        when the pattern does not match.
        """
        pat = r"init\(\d+,\d+,\d+,(\d+),(\d+),'(.+)',(\d+).+\);"
        currentBought = crawlerCommon.extractWithRegEx(pat, jsStr, 1)
        totalProduct = crawlerCommon.extractWithRegEx(pat, jsStr, 2)
        dateExpired = crawlerCommon.extractWithRegEx(pat, jsStr, 3)
        # Site date looks like "11h59 ngày 15/05/2011" ("ngày" = "day").
        t = datetime.datetime.strptime(dateExpired, "%Hh%M ngày %d/%m/%Y")
        return currentBought, totalProduct, t.strftime("%d/%m/%Y %H:%M:%S")

    def description(self, node, xpath, remove_xpath):
        """Collect text snippets under ``node`` matched by ``xpath``.

        Elements matched by any expression in ``remove_xpath`` are
        excluded. Returns a list of ``{'type': 'text', 'data': ...}``
        dicts; an empty list when ``node`` is None or nothing matches.
        """
        if node is None: return []
        # Gather the elements to exclude up front.
        excluded = []
        for rxpath in remove_xpath:
            excluded.extend(node.xpath(rxpath))
        data = []
        for elem in node.xpath(xpath):
            # ``in`` replaces the original manual flag loop (lxml element
            # equality is identity, so the semantics are unchanged).
            if elem in excluded:
                continue
            text = crawlerCommon.getElementText(elem, extract_text=True)
            if text != '':
                data.append({'type': 'text', 'data': text})
        return data

    def content(self, url, xpath, path):
        """Fetch ``url`` and extract the deal detail as a mixed list of
        ``{'type': 'image'|'text', 'data': ...}`` entries.

        Images found inside a matched element are downloaded to ``path``;
        an element with no image contributes its text instead. Best-effort:
        on any error the traceback is printed and whatever was collected
        so far is returned.
        """
        data = []
        try:
            tree = crawlerCommon.buildTree(url)
            for elem in tree.xpath(xpath):
                hasImage = False
                for item in elem.xpath("./descendant-or-self::*"):
                    if item.tag == 'img':
                        src = urljoin(url, crawlerCommon.getAttrib(item, 'src'))
                        crawlerCommon.saveImage(src, path)
                        data.append({'type': 'image', 'data': src})
                        hasImage = True
                if not hasImage:
                    text = crawlerCommon.getElementText(elem, extract_text=True)
                    if text != '':
                        data.append({'type': 'text', 'data': text})
        except Exception:
            # Narrowed from a bare ``except:`` so Ctrl-C / SystemExit
            # still propagate; keep the deliberate best-effort behavior.
            traceback.print_exc()
        return data

    def listProduct(self, url, citiId):
        """Scrape every deal on the listing page ``url`` and save each one
        to the ``groupon.muachung`` collection, keyed by the CRC32 of its
        detail link. ``citiId`` must be a key of ``MuaChung.cities``.

        Best-effort: any error aborts the remaining deals and prints the
        traceback.
        """
        try:
            tree = crawlerCommon.buildTree(url)
            for elem in tree.xpath("//div[contains(@id, 'productItem')]"):
                title = crawlerCommon.getElementText(elem.xpath(".//div[@class='titleBox']//a/span"))
                detailLink = urljoin(url, crawlerCommon.getAttrib(elem.xpath(".//div[@class='titleBox']//a"), 'href'))
                # Stable document id derived from the detail URL
                # (renamed from ``id`` to avoid shadowing the builtin).
                productId = crawlerCommon.getCRC32Unsign(detailLink)
                # Strip everything but digits from the displayed prices.
                mainPrice = re.sub(r'[^\d]', '', crawlerCommon.getElementText(elem.xpath(".//div[@class='mainPrice']//div[@class='numPrice']")))
                originalPrice = re.sub(r'[^\d]', '', crawlerCommon.getElementText(elem.xpath(".//div[contains(@class, 'leftBottomPrice')]//b/strike")))
                # Thumbnail URL is embedded in an inline style: url('...').
                thumbnail = crawlerCommon.getAttrib(elem.xpath(".//div[@class='mainBoxBorder']"), 'style')
                thumbnail = urljoin(url, crawlerCommon.extractWithRegEx(r"url\('(.+)'\)", thumbnail, 1)) 
                # ``localFilePath`` is a module-level global set in
                # __main__ — see the class docstring.
                crawlerCommon.saveImage(thumbnail, localFilePath)
                # Buyer/stock counters live in a <script> sibling of the
                # item's parent element.
                scriptElem = elem.xpath("./../following-sibling::*[name()='script']")
                jsStr = crawlerCommon.getElementText(scriptElem)
                currentBought, totalProduct, dateExpired = self.parseJsItem(jsStr)
                description = self.description(elem, ".//div[@class='contentMainBox']/div/*", 
                                               [".//div[@class='contentMainBox']/div/div[@class='more fr']"])
                content = self.content(detailLink, "//div[@class='contentMain']//div[@class='blueTitleDetail']/div[2]/*", localFilePath)
                # Skip deals whose detail page yielded nothing.
                if len(content) < 1: continue
                timestamp = time.time()
                lastupdate = datetime.datetime.now().strftime("%d/%m/%Y %H:%M:%S")
                db = self.connection['groupon']
                collection = db['muachung']
                # save() upserts by _id, so re-crawling refreshes a deal.
                collection.save({
                    '_id': productId,
                    'title': title,
                    'expiredDate': dateExpired,
                    'detailLink': detailLink,
                    'saleoffPrice': mainPrice,
                    'basePrice': originalPrice,
                    'totalProduct': totalProduct,
                    'currentBuyer': currentBought,
                    'thumbnail': thumbnail,
                    'description': description,
                    'detail': content,
                    'timestamp': timestamp,
                    'lastupdate': lastupdate,
                    'city': self.cities[citiId]
                })
                
                print('----------------------------------')
                print(productId)
                print("%s -- %s" % (title, detailLink))
                print("main price: %s" % mainPrice)
                print("original price: %s" % originalPrice)
                print("thumbnail: %s" % thumbnail)
                print("total: %s" % str(totalProduct))
                print("current: %s" % str(currentBought))
                print("expired: %s" % dateExpired)
                
                del content
                del description
        except Exception:
            # Narrowed from a bare ``except:`` (see ``content``).
            traceback.print_exc()

    def process(self, citiId):
        """Crawl one city's listing page; ``citiId`` is a ``cities`` key."""
        url = "http://muachung.vn/%s" % citiId
        self.listProduct(url, citiId)
        
if __name__ == '__main__':
    # Directory where crawled images are written; read as a module-level
    # global by MuaChung.listProduct / MuaChung.content.
    localFilePath = "/home/longhoanggiang/crawler/files/"
    mc = MuaChung()
    # One job per city, up to 5 concurrent workers. shutdown() stops the
    # pool accepting new work; wait() blocks until the queue drains.
    pool = workerpool.WorkerPool(size=5)
    pool.map(mc.process, mc.cities.keys())
    pool.shutdown()
    pool.wait()
    print('>> Finished')
    # Keep the hard exit (kills any lingering worker threads without
    # waiting on interpreter teardown) but report SUCCESS: the original
    # exited with status 1, which signals failure to the shell/cron.
    os._exit(0)
    
    

