#!/usr/bin/python



import threading
import time
from random import randint
from urllib2 import urlopen
from urllib2 import build_opener
from bs4 import BeautifulSoup, SoupStrainer
from pymongo import MongoClient
from bson.objectid import ObjectId
import madsutil
from madsutil import log
from madsutil import logt
from madsutil import url_fix
from time import gmtime, strftime

class CardRetriever(threading.Thread):
    '''
    Worker thread that walks the paginated deckbox.org card index and
    enqueues one dict per card ({"name", "link"[, "_id"]}) for the
    card-parser thread(s) to consume.

    Fixes vs. previous revision:
      * every fetched page is processed, including the last one (the old
        loop tested for a "Next" link before processing, so the final
        page -- or a single-page result -- was silently skipped);
      * a full queue now blocks until space is free instead of dropping
        the current card after a 5 second sleep;
      * the queue lock is released even if put() raises;
      * the except handler no longer crashes with NameError when the
        failure happens before `card` is assigned.
    '''

    def __init__(self, appender=None, q=None, ql=None, update=False):
        '''
        appender -- log sink forwarded to madsutil.log / madsutil.logt
        q        -- shared work queue the parser threads consume
        ql       -- lock guarding access to that queue
        update   -- when True, re-enqueue cards already stored in Mongo
                    (their existing _id is attached so they get updated)
        '''
        super(CardRetriever, self).__init__()
        self._appender = appender
        self._queue = q
        self._queueLock = ql
        self._counter = 0          # number of cards enqueued so far
        self._update = update

    def run(self):
        '''Fetch every page of the card index, enqueue each card, then
        signal completion through madsutil.END_FLAG.'''
        start_time = time.time()
        log('[Retriever] Start on ' + strftime("%a, %d %b %Y %H:%M:%S +0000", gmtime()), self._appender)
        client = MongoClient(madsutil.DB_URL)
        try:
            db = client[madsutil.DB_NAME]
            cards = db.cards
            deckboxurl = "http://deckbox.org/games/mtg/cards"
            # Parse only the card-table container; the rest of the page is noise.
            tablesparse = SoupStrainer('div', {'id': 'set_cards_table'})

            while True:
                html = urlopen(deckboxurl).read()
                page = BeautifulSoup(html, parse_only=tablesparse)
                cardtable = page.find("table", {"class": "set_cards simple_table main full"})
                pagination = page.find("div", {"class": "pagination_controls"})

                # Process the page we just fetched BEFORE deciding whether
                # there is a next one, so the last page is not skipped.
                self._process_table(cards, cardtable)

                nextlink = self.getnextpag(pagination)
                if nextlink is None:
                    break
                deckboxurl = "http://deckbox.org" + nextlink['href']
        finally:
            # Close the Mongo connection even if a page fails to parse.
            client.close()
        logt('[Retriever] Finish', start_time, self._appender)
        madsutil.END_FLAG = True

    def _process_table(self, cards, cardtable):
        '''Enqueue every card row of one page's table.

        cards     -- the pymongo "cards" collection (used for dedup lookup)
        cardtable -- BeautifulSoup <table> element holding the card rows
        '''
        header = 0      # the first two <tr> of each table are header rows
        card = None     # keeps the handler safe if parsing fails early
        for row in cardtable.findAll('tr'):
            if header < 2:
                header += 1
                continue
            try:
                cols = row.findAll('td')
                anchor = cols[0].find('a')
                name = anchor.contents[0].string.strip()
                link = url_fix(anchor['href'])
                card = {"name": name,
                        "link": link}

                founded = cards.find_one({"name": card['name']})

                if not founded or self._update:
                    # Wait for room instead of dropping the card: the old
                    # code slept 5s and then lost the row entirely.
                    while self._queue.full():
                        log('[Retriever] Queue full: wait', self._appender)
                        time.sleep(5)

                    self._queueLock.acquire()
                    try:
                        if self._update and founded:
                            # Carry the existing _id so the parser updates
                            # the stored document instead of duplicating it.
                            card["_id"] = founded["_id"]
                        self._queue.put(card)
                    finally:
                        self._queueLock.release()

                    self._counter += 1
                    log('[Retriever] put: ' + str(card), self._appender)
                    time.sleep(0.1)     # gentle throttle on the producer
            except Exception as e:
                # `card` is None when the failure happened before it was
                # built -- only log what we actually have.
                if card is not None:
                    log('[Retriever] Card name: ' + card['name'], self._appender)
                    log('[Retriever] Card link: ' + card['link'], self._appender)
                    log(str(card), self._appender)
                log("[Retriever] Unable to parse this card: " + str(e), self._appender)
                madsutil.END_FLAG = True
                raise

    def getnextpag(self, pagination):
        '''Return the "Next" anchor of the pagination div, or None when there
        is no pagination div or no further page.'''
        if pagination is None:
            return None
        for link in pagination.findAll('a'):
            # Image-only anchors have link.string == None; guard before strip().
            if link.string and link.string.strip() == 'Next':
                return link
        return None
        
        