#coding: utf8

import urlparse
import time
import os.path
import re
from pygeocoder import Geocoder
from decimal import *
from scrapy.http import Request
from scrapy.http import FormRequest
from scrapy.spider import BaseSpider
from scrapy.selector import HtmlXPathSelector
from fresh.items import RestaurantItem
from fresh.items import RestaurantReview
from fresh.SalirMasterDB import SalirMasterDB
import datetime
import MySQLdb
from fresh.utils import Utils
from fresh.SalirUtils import SalirUtils
import sys
from scrapy import signals
    

class SalirMasterSpider(BaseSpider):
    """Spider that crawls the salir.com Madrid restaurant listing.

    Starting from the listing page, it yields one FormRequest per restaurant
    link (POSTing sort/page-size form fields), follows the "siguiente"
    paginator link, and produces one RestaurantItem per detail page parsed.
    """
    # Listing entry point, e.g. "http://madrid.salir.com/restaurantes"
    name = "salirmaster"
    allowed_domains = ["salir.com"]
    start_urls = [
        "http://madrid.salir.com/restaurantes~270?"
        ]
    db = SalirMasterDB()

    utils = Utils()
    salirUtils = SalirUtils()
    logLocalName = "SALIR_MAST_SCRAPPY"

    # Running count of restaurant detail pages successfully reached.
    numRestFound = 0

    def __init__(self, *args, **kwargs):
        # Forward *args/**kwargs so Scrapy spider arguments still reach the base
        # class (the original dropped them).
        super(SalirMasterSpider, self).__init__(*args, **kwargs)
        reload(sys)
        sys.setdefaultencoding('utf-8')
        # Renamed from 'time' so the imported time module is not shadowed.
        timestamp = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
        filename = os.path.join('logSalirMaster/', timestamp + '.log')
        self.utils.logProcessInit(filename)

        self.db.utils = self.utils
        self.salirUtils.utils = self.utils

        self.utils.logProcessBegin(self.logLocalName)

        self.db.connectDB()

    def parse(self, response):
        """Parse a listing page.

        Yields one FormRequest per restaurant link found (carrying a fresh
        RestaurantItem in request meta), then a Request for the next listing
        page if the paginator offers one. Errors are logged, not raised.
        """
        try:
            hxs = HtmlXPathSelector(response)

            # Each restaurant entry exposes its detail-page URL in an <h3> link.
            for heading in hxs.select('//div[@class="clearfix"]/h3'):
                url = heading.select('a/@href').extract()
                if len(url) > 0:
                    self.utils.logProcessDetail("Restaurant yield url: " + str(url), self.utils.DEBUG)
                    restaurant = RestaurantItem()
                    restaurant['url_scrapy'] = url[0]
                    # POST form fields: sort reviews newest-first, 60 per page.
                    yield FormRequest(url[0],
                                      formdata={'ordenar': '-fecha', 'resultadosporpagina': '60'},
                                      callback=self.parse_restaurant,
                                      meta={'restaurant': restaurant})

            # Follow pagination ("siguiente" = next page).
            url = hxs.select('//div[@id="itemList"]/p[@class="paginador"]/a[@class="siguiente linkPaginador"]/@href').extract()
            if len(url) > 0:
                # Join once against the current URL (the original joined twice,
                # which was redundant).
                yield Request(urlparse.urljoin(response.url, url[0]), callback=self.parse)

        except Exception as e:
            self.utils.logProcessDetail("Error al parse: " + str(e), self.utils.ERROR)

    def parse_restaurant(self, response):
        """Parse one restaurant detail page.

        Fills and returns the RestaurantItem carried in response.meta.
        Returns None when parsing fails (the error is logged).
        """
        # Pre-bind so the except handler cannot hit a NameError when the
        # failure happens before the item is pulled out of response.meta.
        restaurant = None
        try:
            hxs = HtmlXPathSelector(response)
            self.numRestFound = self.numRestFound + 1
            self.utils.log("debug", "num restaurants found: " + str(self.numRestFound), self.utils.DEBUG)

            restaurant = response.meta['restaurant']

            self.salirUtils.fillRestaurant(response, hxs, restaurant)

            return restaurant

        except Exception as e:
            # The original try/except/else had an unreachable else branch (the
            # try body always returns); it has been removed.
            self.utils.logProcessDetail("Error al parse_restaurant: " + str(restaurant) + " error: " + str(e), self.utils.ERROR)

    def close_spider(self):
        # NOTE(review): Scrapy spiders are normally shut down via closed(reason)
        # or the spider_closed signal; confirm something actually invokes this
        # method, otherwise the final count is never logged.
        self.utils.logProcessEnd("Robot Salir found: " + str(self.numRestFound) + " restaurants", self.utils.DEBUG)