#coding: utf8

import urlparse
import time
import os.path
import re
from pygeocoder import Geocoder
from decimal import *
from scrapy.http import Request
from scrapy.http import FormRequest
from scrapy.spider import BaseSpider
from scrapy.selector import HtmlXPathSelector
from fresh.items import RestaurantItem
from fresh.items import RestaurantReview
from fresh.SalirMasterDB import SalirMasterDB
import datetime
import MySQLdb
from fresh.utils import Utils
from fresh.SalirUtils import SalirUtils
import sys
from scrapy import signals
    

class SalirHistSpider(BaseSpider):
    """Spider that re-crawls previously discovered salir.com restaurant
    pages (URLs loaded from the master DB) and scrapes their data and
    reviews, marking restaurants that respond 404/410 or show a
    "closed" banner as inactive.
    """

    # 404/410 are handled in parse() to flag the restaurant as gone,
    # instead of being discarded by Scrapy's default middleware.
    handle_httpstatus_list = [404, 410]
    name = "salirhist"
    allowed_domains = ["salir.com"]
    start_urls = []

    # NOTE(review): these helpers are class attributes, shared by every
    # instance — assumes a single spider instance per process (the usual
    # Scrapy setup); confirm before running multiple instances.
    db = SalirMasterDB()
    utils = Utils()
    salirUtils = SalirUtils()
    logLocalName = "SALIR_HIST_SCRAPPY"

    # Running count of restaurants successfully found (not 404/closed).
    numRestFound = 0

    def __init__(self, *args, **kwargs):
        # BUGFIX: forward *args/**kwargs to the base spider; they were
        # accepted but silently dropped before.
        super(SalirHistSpider, self).__init__(*args, **kwargs)
        # HACK: force the Python 2 default encoding to UTF-8 so the
        # scraped Spanish text does not raise UnicodeDecodeError.
        # Must be removed on any Python 3 port.
        reload(sys)
        sys.setdefaultencoding('utf-8')
        # BUGFIX: renamed local (was `time`), which shadowed the
        # imported `time` module.
        timestamp = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
        filename = os.path.join('logSalirHist/', timestamp + '.log')
        self.utils.logProcessInit(filename)

        # Wire the shared logger into the helper objects.
        self.db.utils = self.utils
        self.salirUtils.utils = self.utils

        self.utils.logProcessBegin(self.logLocalName)

        # Seed start_urls with every restaurant URL stored in the DB.
        if self.db.connectDB():
            self.db.readURLs(self.start_urls)

    def make_requests_from_url(self, url):
        """Build the initial request for `url`.

        POSTs the sort/page-size form so reviews come newest-first, 60
        per page, and carries the DB restaurant id in request meta so
        parse() can update the right row.
        """
        idRestSource = self.db.getRestFromUrl(url)

        return FormRequest(url,
                    formdata={'ordenar': '-fecha', 'resultadosporpagina': '60'},
                    meta={'idRestSource': idRestSource},
                    dont_filter=True,
                    callback=self.parse)

    def parse(self, response):
        """Parse a restaurant page.

        Marks the restaurant inactive on 404/410 or when the page shows
        the "itemcerrado" (closed) banner; otherwise fills a
        RestaurantItem and follows each review's "leer más" link for the
        full review text. Yields the item directly only when no review
        detail requests were issued (otherwise parse_postLeerMas yields
        it once enriched).
        """
        try:
            hxs = HtmlXPathSelector(response)
            idRestSource = response.meta['idRestSource']
            if idRestSource is None:
                self.utils.logProcessDetail("ERROR DE PROGRAMACIÓN/BD NO SE PUEDE OBTENER EL CODIGO DE RESTAURANTE A PARTIR DE URL: "+str(response.url) , self.utils.ERROR)
                return

            # 404/410: the restaurant page is gone — flag it inactive.
            if response.status in self.handle_httpstatus_list:
                self.db.setRestaurantStatus(idRestSource, self.db.NACTIVE)
                return

            # A visible "closed" banner also means inactive.
            xpathActive = hxs.select('//p[@id="itemcerrado"]').extract()
            sActive = self.utils.convertListToString(xpathActive)
            if len(sActive) > 0:
                self.db.setRestaurantStatus(idRestSource, self.db.NACTIVE)
                return

            self.numRestFound += 1
            self.utils.log("debug","num restaurants found: "+str(self.numRestFound),self.utils.DEBUG)

            restaurant = RestaurantItem()
            restaurant['idRestSource'] = idRestSource
            restaurant['url_scrapy'] = response.url

            self.salirUtils.fillRestaurant(response, hxs, restaurant)

            self.utils.log("debug","restaurant: "+str(restaurant),self.utils.DEBUG)

            bYield = False
            for l in hxs.select('//div[@id="item-opiniones"]/div[@class="top hreview"]'):
                hLeerMas = l.select('div[@class="item"]/h3/a/@href').extract()
                self.utils.log("debug","LEERMAS: " + str(hLeerMas) ,self.utils.DEBUG)
                sLeerMas = self.utils.convertListToString(hLeerMas)
                if sLeerMas is not None and len(sLeerMas) > 0:
                    bYield = True
                    # The same item instance is shared by every detail
                    # request; parse_postLeerMas returns it enriched.
                    yield Request(sLeerMas, callback=self.parse_postLeerMas, meta={'restaurant': restaurant})
                else:
                    self.utils.logProcessDetail("Error al parse: " + "NO PUDO LEER DETALLE DE OPINION",self.utils.ERROR)

            # Only yield here when no review-detail request will do it.
            if not bYield:
                yield restaurant

        except Exception as e:
            self.utils.logProcessDetail("Error al parse: " + str(e),self.utils.ERROR)

    def parse_postLeerMas(self, response):
        """Parse a review-detail ("leer más") page and return the
        shared RestaurantItem with the full review text added."""
        try:
            restaurant = response.meta['restaurant']

            self.salirUtils.fillRestaurantCommentLeerMas(response, restaurant)

            return restaurant

        except Exception as e:
            self.utils.logProcessDetail("Error al parse_postLeerMas: " + str(e),self.utils.ERROR)

    def close_spider(self):
        """Log the final restaurant count when the spider shuts down."""
        self.utils.logProcessEnd("Robot SalirHist updated: "+str(self.numRestFound) + " restaurants", self.utils.DEBUG)