# -*- coding: utf-8 -*-

import mechanize
from mechanize import Browser
from pymongo import Connection
import zlib
import datetime
import sys
import logging
from mongolog.handlers import MongoHandler
import re
import cStringIO as StringIO
import lxml.html
from lxml import etree
import workerpool
from mechanize import urlretrieve
from urlparse import urljoin
import os
import hashlib
import urllib

# ---- basic helper functions ----
def getCRC32Unsign(textToHash=None):
    """Return the unsigned CRC32 checksum of *textToHash* as a decimal string.

    zlib.crc32 can return a signed value (always does on Python 2), so the
    result is masked with 0xffffffff to normalize it to the unsigned range.
    The py2-only ``L`` long-literal suffix was dropped; ``0xffffffff`` behaves
    identically on Python 2 and 3.
    """
    return str(zlib.crc32(textToHash) & 0xffffffff)

def getMD5Hash(textToHash=None):
    """Return the hexadecimal MD5 digest of *textToHash*."""
    digest = hashlib.md5(textToHash)
    return digest.hexdigest()

def getMd5Path(stringToHash):
    """Build a three-level directory prefix ('a/b/c/') from the first three
    hex characters of the input's MD5 digest."""
    digest = hashlib.md5(stringToHash).hexdigest()
    return '{0}/{1}/{2}/'.format(*digest[:3])

def getMd5FileName(stringToHash):
    """Build a hashed relative file path: the first three hex characters of
    the MD5 digest become nested directories, the remainder is the name."""
    digest = hashlib.md5(stringToHash).hexdigest()
    return '/'.join([digest[0], digest[1], digest[2], digest[3:]])

def buildTree(url):
    try:
        ua6300 = "Nokia6300/2.0 (04.20) Profile/MIDP-2.0 Configuration/CLDC-1.1 UNTRUSTED/1.0"
        br = Browser()
        br.addheaders = [("User-Agent", ua6300)]
        br.open(url)
        html = br.response().read()
        parser = etree.HTMLParser(encoding='utf-8')
        tree = etree.parse(StringIO.StringIO(html), parser)
        return tree
    except:
        print sys.exc_info()
        print url
        pass

def getElementText(elem):
    """Return the stripped text content of an lxml element, or '' for None."""
    if elem is None:
        return ''
    serialized = etree.tostring(elem)
    fragment = lxml.html.fromstring(serialized)
    return fragment.text_content().strip()

def getAttributeText(node, attrb):
    """Return attribute *attrb* of *node* via node.get(); '' when the node
    is None or the attribute name is empty."""
    if node is None:
        return ''
    if attrb == '':
        return ''
    return node.get(attrb)

def extractWithRegEx(pat, matchStr, matchIdx):
    """Run regex *pat* against *matchStr* and return group *matchIdx*.

    Returns '' when there is no match, the pattern is invalid, or the
    group index does not exist.  (Docstring translated from Vietnamese.)

    Bug fix: the original tested ``m != ''`` which is true for BOTH a Match
    object and None, so a failed search only returned '' by raising
    AttributeError on ``None.group`` into a bare except.  Test for None
    explicitly instead.
    """
    try:
        rexp = re.compile(pat)
        m = rexp.search(matchStr)
        if m is not None:
            return m.group(matchIdx)
        return ''
    except Exception:
        # Invalid pattern, bad group index, or non-string input -> ''.
        return ''

def strToASCII(str):
    """Strip Vietnamese diacritics, mapping accented letters to their ASCII
    base letters (e.g. 'á' -> 'a', 'Đ' -> 'D').

    The parameter keeps its original name ``str`` (shadowing the builtin)
    so any keyword callers stay compatible.  Returns '' for empty input.

    Bug fix: the original ``except: pass`` silently returned None on any
    failure; now the input is returned unchanged (best effort), which is
    safer for callers expecting a string.
    """
    if str == '': return ''
    original = str
    try:
        listPattern = [r"á|à|ả|ạ|ã|â|ấ|ầ|ẩ|ậ|ẫ|ă|ắ|ằ|ẳ|ặ|ẵ", r"Á|À|Ả|Ạ|Ã|Â|Ấ|Ầ|Ẩ|Ậ|Ẫ|Ă|Ắ|Ằ|Ẳ|Ặ|Ẵ",
                       r"đ", r"Đ", r"í|ì|ỉ|ị|ĩ", r"Í|Ì|Ỉ|Ị|Ĩ", r"é|è|ẻ|ẹ|ẽ|ê|ế|ề|ể|ệ|ễ", r"É|È|Ẻ|Ẹ|Ẽ|Ê|Ế|Ề|Ể|Ệ|Ễ",
                       r"ó|ò|ỏ|ọ|õ|ô|ố|ồ|ổ|ộ|ỗ|ơ|ớ|ờ|ở|ợ|ỡ", r"Ó|Ò|Ỏ|Ọ|Õ|Ô|Ố|Ồ|Ổ|Ộ|Ỗ|Ơ|Ớ|Ờ|Ở|Ợ|Ỡ",
                       r"ú|ù|ủ|ụ|ũ|ư|ứ|ừ|ử|ự|ữ", r"Ú|Ù|Ủ|Ụ|Ũ|Ư|Ứ|Ừ|Ử|Ự|Ữ", r"ý|ỳ|ỷ|ỵ|ỹ", r"Ý|Ỳ|Ỷ|Ỵ|Ỹ"]
        rep = ['a', 'A', 'd', 'D', 'i', 'I', 'e', 'E', 'o', 'O', 'u', 'U', 'y', 'Y']
        # Normalize to a UTF-8 byte string so the byte-string patterns match
        # (py2 semantics; this module declares a utf-8 source encoding).
        str = str.encode('utf-8', 'replace')
        for pattern, plain in zip(listPattern, rep):
            str = re.sub(pattern, plain, str)
        return str
    except Exception:
        return original

def saveImage(url, lv=0):
    ''' Lưu ảnh xuống local với tên dựa file local dựa vào hash Md5. Nếu local đã có file rồi thì 0 load nữa. '''
    try:
        lv += 1
        localFilename = '{0}{1}.jpg'.format(localFilepath, getMd5FileName(url))
        print 'saveImage: {0} -> {1}'.format(url, localFilename)
        if not os.path.isfile(localFilename):
            if not os.path.exists(os.path.dirname(localFilename)):
                os.makedirs(os.path.dirname(localFilename))
            urlretrieve(url, localFilename)
        return localFilename
    except:
        if lv < 2:  saveImage(url, lv)
        err_str = 'saveImage error: {0} >> {1}'.format(url, sys.exc_info()[1])
        log.error(err_str)
        pass
    return ''  

# ---- end basic helper functions ----

def processImage(cat):
    log.debug("Start processImage({0})".format(cat))
    url = "http://www.upanh.com/mobile/cat/{0}_background".format(cat[0])
    tree = buildTree(url)
    try:
        connection = Connection('localhost', 27017)
        db = connection['upanh']
        collection = db['hinhnen']
        for item in tree.xpath("//ul[@class='image']/li/a/img"):
            imgURL = urljoin(url, item.get('src'))
            saveImage(imgURL)
            id = getCRC32Unsign(imgURL)
#            print imgURL
            collection.save({'_id': id, 'link': imgURL, 'cat': cat[1]})
    except:
        err_str = "{0}>> {1}".format(sys.exc_info[1], cat[1])
        log.error(err_str)
        print err_str
        pass
    
if __name__ == '__main__':
    # Root directory of the MD5-hashed local image store used by saveImage().
    localFilepath = "/home/hoangnamhai/HarvestedData/upanh/"
    # Crawl progress/errors are logged into MongoDB ('mongolog' db, 'log'
    # collection) through mongolog's MongoHandler.
    log = logging.getLogger('upanh_com')
    log.setLevel(logging.INFO)
    log.addHandler(MongoHandler.to('mongolog', 'log'))
    
    # [category_id, display_name] pairs: the id feeds the crawl URL in
    # processImage(), the name is stored with each image record.
    categoryT1 = [[1, 'Lễ hội'], [2, 'Người đẹp'], [3, 'Ca nhạc'], [4, 'Phim ảnh'], [5, 'Thiên nhiên'], [6, 'Vui nhộn'], [7, 'Hoạt hình'], 
                  [8, 'Tình yêu'], [9, 'Động vật'], [10, 'Nghệ thuật'], [11, 'Xe cộ'], [12, 'Thể thao'], [13, 'Game'], [14, 'Khác'], 
                  [15, 'Made by you'], [16, 'Soo'], [17, 'SeoHuyn']]
    
    log.info("Start crawler upanh.com")
    # Crawl up to 5 categories concurrently; shutdown() stops the workers
    # once the queue drains, wait() blocks until they have all exited.
    pool = workerpool.WorkerPool(size=5)
    pool.map(processImage, categoryT1)
    pool.shutdown()
    pool.wait()
    log.info("Crawler upanh.com finished")
    
    print ">>> Finished at {0}".format(str(datetime.datetime.now()))
    sys.exit()

    
    