'''
Created on 02/07/2010

@author: sbrito
'''

from urllib2 import build_opener, URLError, HTTPError, HTTPCookieProcessor
from lib.utils import log
from etc import constants
from etc import config

import cookielib
import time
import base64
import Image
import StringIO

def load_candidate(url, uf, candidate_type, candidate_id, scrap_callback):
    """Load and scrap a single candidate page, enriching the scraped dict
    with identifying metadata and the candidate's photo.

    Parameters:
        url: candidate page URL to fetch and scrap.
        uf: federative-unit (state) code stored on the result.
        candidate_type: type tag stored on the result.
        candidate_id: id stored on the result; also used as the image
            file name when config.SAVE_IMAGE_AS_FILE is enabled.
        scrap_callback: callable(page_html) -> dict or None.

    Returns the scraped dict augmented with 'uf', 'candidate_type',
    'candidate_id' and 'img', or None when scraping failed entirely.
    """
    # A fresh cookie jar per candidate: the image URL appears to be
    # session-bound, so the same opener must serve both requests.
    cj = cookielib.CookieJar()
    opener = build_opener(HTTPCookieProcessor(cj))

    candidate_po = load_and_scrap(url, scrap_callback, opener)

    if candidate_po is not None:
        candidate_po['uf'] = uf
        candidate_po['candidate_type'] = candidate_type
        candidate_po['candidate_id'] = candidate_id

        img = load_page(constants.URL_IMG, opener)

        # load_page returns None on network/HTTP errors; guard so a
        # missing photo does not abort the whole candidate record.
        if img is not None and config.SAVE_IMAGE_ON_DATABASE:
            candidate_po['img'] = base64.encodestring(img)
        else:
            candidate_po['img'] = ''

        if img is not None and config.SAVE_IMAGE_AS_FILE:
            Image.open(StringIO.StringIO(img)).save('../data/%s.jpg' % candidate_id)

    return candidate_po

def load_and_scrap(url, scrap_callback, opener=None):
    """Fetch *url* and parse it with *scrap_callback*, retrying on failure.

    Retries up to config.MAX_RETRIES attempts, sleeping
    config.DELAY_BETWEEN_RETRIES seconds before each retry, whenever
    either the download or the scraping yields None.

    Parameters:
        url: page URL to download.
        scrap_callback: callable(page_html) -> parsed result or None.
        opener: optional urllib2 opener; a plain one is built if omitted.

    Returns the callback's result, or None if every attempt failed.
    """
    if opener is None:
        opener = build_opener()

    result = None
    scrap = None
    tries = 0

    while (result is None or scrap is None) and tries < config.MAX_RETRIES:
        if tries > 0:
            log('retrying in %s secs [%s]' % (config.DELAY_BETWEEN_RETRIES, url))
            time.sleep(config.DELAY_BETWEEN_RETRIES)

        result = load_page(url, opener)
        # Only scrap when the download succeeded: passing None to the
        # callback could crash parsers that expect markup, instead of
        # triggering the intended retry.
        scrap = scrap_callback(result) if result is not None else None
        tries += 1

    return scrap

def load_page(url, opener):
    """Download *url* through *opener* and return the raw response body.

    Returns the page bytes, or None on any HTTP or network error.
    """
    try:
        response = opener.open(url, timeout=config.REQUEST_TIMEOUT)
        try:
            return response.read()
        finally:
            # Explicitly release the connection; the original leaked it.
            response.close()
    except HTTPError:
        # HTTPError is a subclass of URLError, so it must be caught
        # first — in the original order this clause was unreachable.
        return None
    except URLError:
        return None