# -*- coding: utf-8 -*-

import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), '../'))
import commonlib
import re
import datetime
import time
import traceback
import workerpool
import mechanize
import pprint
from pymongo.objectid import ObjectId
from MongoModel import MongoModel
from workerpool import WorkerPool

# MongoDB connection endpoint used by the crawler below.
MONGO_SERVER = 'beta.mana.vn'
MONGO_PORT = 27017

# Module-level logger; name matches the target site / database name.
logger = commonlib.getLogger('tracnghiemtienganh')

class Crawler(MongoModel):
    
    url = 'http://tracnghiemtienganh.com/'
    dbName = 'tracnghiemtienganh'
    
    def __init__(self, host, port):
        MongoModel.__init__(self, host, port)
    
    def getCategory(self):
        logger.debug('call getCategory')
        categories = []
        try:
            tree = commonlib.getXMLTree(self.url)
            if tree == None: return
            for item in tree.xpath("//div[@id='left-sidebars']//li[contains(@class, 'cat-item')]/a"):
                link = commonlib.getAttribText(item, 'href')
                if link != '': commonlib.urlJoin(self.url, link)
                name = commonlib.getElementText(item)
                print "%s - %s" % (name, link)
                if name != '':
                    categories.append({'name': name, 'link': link})
        except:
            logger.error(traceback.format_exc())
        return categories
    
    def submit(self, url):
        logger.debug('call submit with url={0}'.format(url))
        html = ''
        try:
            br = mechanize.Browser()
            br.open(url)
            br.select_form(nr=1)
            html = br.submit().read()
        except:
            logger.error(traceback.format_exc())
        finally:
            return html
    
    def checkCat(self, name):
        logger.debug('call checkCat(%s)' % name)
        oid = None
        try:
            db = self.connection[self.dbName]
            collection = db['category']
            row = collection.find_one({'name': name})
            if row: oid = row['_id']
            else:
                collection.save({'name': name})
                row = collection.find_one({'name': name})
                if row: oid = row['_id']
        except:
            logger.error(traceback.format_exc())
        finally:
            return oid
    
    def getTestOfCategory(self, cat):
        logger.debug('call getTestOfCategory')
        try:
            db = self.connection[self.dbName]
            collection = db['article']
            catId = self.checkCat(cat['name'])
            if catId == None: return
            url = cat['link']
            while url != '':
                html = self.submit(url)
                if html == '': break 
                tree = commonlib.buildTreeFromHTML(html)
                if tree == None: return
                postUrl = commonlib.getAttribText(tree.xpath("//div[@id='post-entry']/div[@class='post-meta']/h1/a"), 'href')
                data = []
                correctAnswer = []
                for iquestion in tree.xpath("//div[@class='show-question']"):
                    question = commonlib.stringify(iquestion.xpath(".//div[@class='show-question-content']"))
                    answers = []
                    for ianswer in iquestion.xpath(".//li[contains(@class, 'answer')]"):
                        answer = commonlib.getElementText(ianswer)
                        if 'correct-answer' in commonlib.getAttribText(ianswer, 'class'):
                            answers.append({'text': answer, 'iscorrect': True})
                            correctAnswer.append(answer)
                        else:
                            answers.append({'text': answer, 'iscorrect': False})
                    data.append({'question': question, 'answers': answers})
                pprint.pprint(data)
                pprint.pprint(correctAnswer)
                if postUrl != '':
                    hashUrl = commonlib.getMD5Hash(postUrl)
                    if not collection.find_one({'hashUrl': hashUrl}):
                        collection.save({
                            'catId': ObjectId(catId),
                            'data': data,
                            'correct': correctAnswer,
                            'hashUrl': hashUrl,
                            'lastupdate': datetime.datetime.now(),
                            'timestamp': time.time()
                        })
                    else:
                        logger.info('This english test already exist in database')
                nextPageNode = tree.xpath("//div[@class='wp-pagenavi']//span[@class='current']/following-sibling::*[1]")
                if nextPageNode == None: break
                if len(nextPageNode) == 0: break
                href = commonlib.getAttribText(nextPageNode[0], 'href')
                url = commonlib.urlJoin(url, href) if href != '' else ''
        except:
            logger.error(traceback.format_exc())
    
    def process(self):
        logger.debug('call process')
        data = self.getCategory()
        try:
            pool = workerpool.WorkerPool(size=5)
            pool.map(self.getTestOfCategory, data)
            pool.shutdown()
            pool.wait()
        except:
            logger.error(traceback.format_exc())
    
if __name__ == '__main__':

    logger.info('start crawler tracnghiemtienganh')
    cr = Crawler(MONGO_SERVER, MONGO_PORT)
    cr.process()
    logger.info('finished crawler tracnghiemtienganh')
    # Hard-exit to kill any lingering workerpool threads without waiting
    # on interpreter teardown. BUG FIX: exit status 0 (success) instead of
    # 1, which falsely signalled failure to the shell/scheduler.
    os._exit(0)
