# -*- coding: utf-8 -*-
'''
Created on Dec 21, 2010

@author: HoangNamHai

Database: Cafef
Path: 
'''
import urllib
import workerpool
import hashlib
import datetime
import time
import os
import re
import lxml.html
import cStringIO as StringIO
import sys
import cookielib
from mechanize import Browser
from urllib import urlretrieve
from lxml import etree
from pymongo import Connection
from urlparse import urljoin
import logging
from mongolog.handlers import MongoHandler

# Root directory for downloaded chart images, one sub-directory per ticker.
localFilepath = '/home/hoangnamhai/HarvestedData/cafef/'
# Compiled XPath that concatenates all descendant text of an element.
stringify = etree.XPath("string()")

# MongoDB target: database "cafef", collection "thongtincongty"
# (company profiles), on a local mongod.
connection = Connection('localhost', 27017)
db = connection["cafef"]
collection = db["thongtincongty"]

# Crawler logger, persisted into the 'mongolog' database, 'log' collection.
log = logging.getLogger('cafef')
log.setLevel(logging.DEBUG)
log.addHandler(MongoHandler.to('mongolog', 'log'))

warning_if_less = 100   # warn when an article body has fewer than 100 words


def getElementText(elem):
    """Return the concatenated text of *elem* and its descendants, stripped.

    Returns '' when *elem* is None or when its direct ``.text`` is None.
    NOTE(review): an element whose own ``.text`` is None but whose children
    do carry text also yields '' -- presumably intentional for this scraper;
    confirm before relying on it elsewhere.
    """
    # `is None` instead of `== None`: identity test is the Python idiom and
    # avoids invoking any rich-comparison machinery on lxml elements.
    if elem is None:
        return ''
    if elem.text is None:
        return ''
    return stringify(elem).strip()

def buildTree(url, outputHTML=False):
    result = urllib.urlopen(url)
    html = result.read()
    if outputHTML: print html
    parser = etree.HTMLParser(encoding='utf-8')
    tree = etree.parse(StringIO.StringIO(html), parser)
    return tree

def getMD5Hash(textToHash=None):
    """Return the hexadecimal MD5 digest of *textToHash*."""
    digest = hashlib.md5(textToHash)
    return digest.hexdigest()

def getMd5Path(stringToHash):
    """Return a three-level directory prefix ('a/b/c/') built from the
    first three hex characters of the MD5 of *stringToHash*."""
    digest = getMD5Hash(stringToHash)
    return '%s/%s/%s/' % (digest[0], digest[1], digest[2])

def getMd5FileName(stringToHash):
    """Return an MD5-derived relative path: the first three hex characters
    become directory levels, the remaining 29 the file name proper."""
    digest = getMD5Hash(stringToHash)
    return '%s/%s/%s/%s' % (digest[0], digest[1], digest[2], digest[3:])

def saveImage(url, maCK):
    ''' Lưu ảnh xuống local với tên dựa file local dựa vào hash Md5. Nếu local đã có file rồi thì 0 load nữa. '''
    try:
        path = localFilepath + maCK + "/"
        localFilename = '{0}{1}.jpg'.format(path, getMd5FileName(url))
        print 'saveImage: {0} -> {1}'.format(url, localFilename)
        if not os.path.isfile(localFilename):
            if not os.path.exists(path + getMd5Path(url)):
                os.makedirs(path + getMd5Path(url))
            urlretrieve(url, localFilename)
        return localFilename
    except:
        print 'saveImage error: {0}'.format(url)
        print sys.exc_info()[1]
        pass
    return ''

def wordCount(str):
    """Return the number of whitespace-separated words in *str*.

    (The parameter shadows the builtin ``str``; kept for interface
    compatibility with existing callers.)
    """
    # BUG FIX: the old line-by-line version counted every blank line as one
    # word, because ''.split(' ') yields [''].  A no-argument split() breaks
    # on runs of any whitespace (including newlines) and produces no empty
    # tokens, so blank lines contribute zero.
    return len(str.split())

def getListCongTy():
    """Fetch the full list of listed companies from cafef's JSON-P feed.

    Returns the decoded list of company dicts (keys include 'Symbol',
    'TradeCenter', 'CompanyName'), or None when the payload does not
    contain the expected array.
    """
    br = Browser()
    cj = cookielib.LWPCookieJar()
    br.set_cookiejar(cj)
    br.set_handle_referer(True)
    br.set_handle_redirect(True)
    br.set_handle_robots(False)
    # The proxy endpoint expects a cafef.vn referer header.
    br.addheaders = [("Referer", "http://cafef.vn/du-lieu.chn")]
    br.open("http://solieu6.vcmedia.vn/ProxyHandler.ashx?RequestName=CompanyInfo&CallBack=cafef_ds_cong_ty_niem_yet.OnLoaded&RequestType=json&TradeId=-2&IndustryId=0&Keyword=&PageIndex=1&PageSize=654&Type=1")
    # Strip the JSON-P callback wrapper and keep only the embedded array.
    preg = re.compile(r"(\[.+\])")
    m = preg.search(br.response().read())
    # BUG FIX: re.search returns a Match object or None, never '' -- the old
    # `if m != ''` test was always true and crashed with AttributeError
    # whenever the feed changed shape.
    if m is not None:
        # SECURITY: eval() of remote data executes arbitrary expressions.
        # The payload is JSON-like; prefer json.loads() on a validated
        # string if this feed is ever revisited.
        return eval(m.group(1))
    return None

def processDothi(maCK):
    """Download the five price-chart images for ticker *maCK* and return
    a list of {'imageURL': ..., 'chartType': ...} dicts describing them."""
    log.debug("Start processDothi, param: {0}".format(maCK))
    periods = ('7days', '1month', '3months', '6months', '1year')
    charts = []
    for period in periods:
        png_url = 'http://cafef.vn/FinanceStatementData/{0}/{1}.png'.format(maCK, period)
        saveImage(png_url, maCK)
        charts.append({'imageURL': png_url, 'chartType': period})
    return charts

def processThongTinChung(maCK, sanGD):
    log.debug("Start processThongTinChung, param: {0} {1}".format(maCK, sanGD))
    url = 'http://cafef.vn/{0}/thong-tin-chung/{1}-hnh.chn'.format(sanGD, maCK)  
    log.debug("Start url, param: {0}".format(url))
    global warning_if_less
    wordsNum = 0
    try:
        print '\n\nprocessThongTinChung: ', url
        tree = buildTree(url)
    #    listLabel=['Nhóm ngành', 'Vốn điều lệ', 'KL CP đang niêm yết', 'KL CP đang lưu hành']
        myXpath = '//span[contains(.,"Giới thiệu")]/following-sibling::*'.decode('utf-8')
        results = []
        i = -1
        flag = False
        elementNodes = tree.xpath(myXpath)
        for elem in elementNodes:
            i += 1   
            if flag:
                flag = False
                continue
            if elem.tag != 'style' and elem.tag != 'link':
                if elem.tag == 'span' and elementNodes[i+1].tag == 'a':
                    flag = True
                    t = lxml.html.fromstring(etree.tostring(elem))
                    textOut = t.text_content().strip()
                    textOut = re.sub("\s+", " ", textOut)
                    myText = textOut
                    t = lxml.html.fromstring(etree.tostring(elementNodes[i+1]))
                    textOut = t.text_content().strip()
                    textOut = re.sub("\s+", " ", textOut)
                    myText += textOut
                    if myText != '':
                        results.append({'type': 'text', 'data': myText})
                        wordsNum += wordCount(myText)
                else:            
                    t = lxml.html.fromstring(etree.tostring(elem))
                    textOut = t.text_content().strip()
                    textOut = re.sub("\s+", " ", textOut)
                    if textOut != '':
                        results.append({'type': 'text', 'data': textOut})
                        wordsNum += wordCount(textOut)
        
    except:
        err_str = "{0} --> url: {1}".format(sys.exc_info()[1], url)
        print err_str
        log.error(err_str)
    if wordsNum < warning_if_less: log.warn("Bai viet co so tu < {0}, url: {1}".format(warning_if_less, url))
    return results

def processMaChungKhoan(maCK, sanGD, tenCty):
    """Crawl one ticker's profile page and save it into MongoDB.

    maCK   -- ticker symbol (e.g. 'VNM')
    sanGD  -- exchange code, lower-cased by the caller
    tenCty -- full company name

    Builds one document (stats, chart metadata, introduction text) keyed by
    the MD5 of the symbol and saves it to the module-level `collection`;
    any failure is logged and swallowed so one bad ticker does not stop the
    worker pool.
    """
    log.debug("Start processMaChungKhoan, param: {0}, {1}, {2}".format(maCK, sanGD, tenCty))
    global collection
    data = {}
    # _id derived from the symbol: re-crawling the same ticker overwrites
    # the earlier document instead of duplicating it.
    data['_id'] = getMD5Hash(maCK)
    data['maCK'] = maCK
    data['tenCty'] = tenCty
    data['sanGD'] = sanGD
    url = 'http://cafef.vn/{0}/{1}-hnh.chn'.format(sanGD, maCK)
    print 'processMaChungKhoan: ', url
    try:
        tree = buildTree(url)
        # Key-statistics table: each Vietnamese label sits in a
        # td.cacchiso_td2_1 cell, its value in the following sibling <td>.
        # (Labels: trailing-4-quarter EPS, P/E, book value per share, beta,
        # 10-session average matched volume, shares outstanding, market cap.)
        listLabel=['EPS 4 quý gần nhất(nghìn đồng)', 'P/E', 'Giá trị sổ sách /cp(nghìn đồng)',
                   'Hệ số beta', 'KLGD khớp lệnh trung bình 10 phiên', 'KLCP đang lưu hành',
                   'Vốn hóa thị trường(tỷ đồng)']
        
        contents = []
        for dataLabel in listLabel:
            # decode('utf-8') turns the Python 2 byte string into unicode so
            # lxml matches the accented label text correctly.
            myXpath = ("//td[contains(.,'" +  dataLabel +"')][@class='cacchiso_td2_1']/following-sibling::td").decode('utf-8')
            myText = dataLabel + ':' + getElementText(tree.xpath(myXpath)[0])
            contents.append({'type': 'text', 'data': myText})
                            
        # First-listing facts (first trading day, first close, initially
        # listed volume): value is the bold <span> inside the matching <div>.
        listLabel2=['Ngày giao dịch đầu tiên', 'Giá đóng cửa phiên GD đầu tiên', 'Khối lượng cổ phiếu niêm yết lần đầu']
        for dataLabel in listLabel2:
            myXpath = ("//div[contains(.,'" + dataLabel + "')]/span[contains(@style,'bold')]").decode('utf-8')
            myText = dataLabel + ':' + getElementText(tree.xpath(myXpath)[0])
            contents.append({'type': 'text', 'data': myText})
        
        data['stat'] = contents
        data['chart'] = processDothi(maCK)
        data['content'] = processThongTinChung(maCK, sanGD)
        data['lastupdate'] = str(datetime.datetime.now())
        data['timeStamp'] = str(time.time())
        collection.save(data)
    except:
        # NOTE(review): bare except also traps SystemExit/KeyboardInterrupt;
        # errors are logged so the crawl continues with the next ticker.
        err_str = "{0} --> url: {1}".format(sys.exc_info()[1], url)
        print err_str
        log.error(err_str)
        
    return

def process(dataCongty):
    """Worker-pool entry point: crawl one company record from the listing
    feed (expects 'Symbol', 'TradeCenter' and 'CompanyName' keys)."""
    log.debug("Start process, param: {0}".format(dataCongty))
    symbol = dataCongty['Symbol']
    exchange = dataCongty['TradeCenter'].lower()
    company = dataCongty['CompanyName']
    processMaChungKhoan(symbol, exchange, company)


# --- Script entry point: fetch the company list, then crawl every ticker
# --- concurrently with a 10-thread worker pool.
listCongty = getListCongTy() 
print "\nTong so cong ty: ", len(listCongty)
log.debug("Start crawler cafef thongtincty")
pool = workerpool.WorkerPool(size=10)
pool.map(process, listCongty)
# shutdown() queues the stop signals for the workers; wait() then blocks
# until every queued job has finished.
pool.shutdown()
pool.wait()

print '\n\n\n\nFinished !'
log.info("Tong so cty: {0}".format(len(listCongty)))
log.debug("crawler cafef thongtincty finished")
sys.exit()



