#coding: utf8
# --- standard library ---
import datetime
import json
import os.path
import re
import sys
import time
import urlparse
from decimal import *

# --- third party ---
import MySQLdb
from pygeocoder import Geocoder
from scrapy import log
from scrapy.exceptions import CloseSpider
from scrapy.http import FormRequest
from scrapy.http import Request
from scrapy.selector import HtmlXPathSelector
from scrapy.spider import BaseSpider

# --- project local ---
from fresh.TenedorMasterDB import TenedorMasterDB
from fresh.TenedorUtils import TenedorUtils
from fresh.items import RestaurantItem
from fresh.items import RestaurantReview
from fresh.utils import Utils

class TenedorHistSpider(BaseSpider):
    """Re-crawl historical eltenedor.es restaurant URLs stored in the DB.

    Seed URLs are loaded from the master DB in ``__init__``; each response
    is parsed into a :class:`RestaurantItem` and its review pages are then
    fetched through the site's JSON "contentopiniones" endpoint.
    """

    # Statuses listed here reach parse() instead of being dropped by Scrapy;
    # they mark the restaurant as no longer active in the DB.
    handle_httpstatus_list = [404, 410]
    name = "tenedorhist"
    allowed_domains = ["eltenedor.es"]
    start_urls = []
    db = TenedorMasterDB()          # master DB gateway (project class)
    logLocalName = "TENEDOR_HIST_SCRAPPY"
    numRestFound = 0                # running count of restaurants parsed

    utils = Utils()                 # shared logging helper (project class)
    tenedorUtils = TenedorUtils()   # site-specific scraping helpers

    def __init__(self, *args, **kwargs):
        """Open the log file, connect to the DB and load the seed URLs.

        Raises:
            CloseSpider: when the DB connection cannot be established.
        """
        super(TenedorHistSpider, self).__init__()
        # Legacy Python 2 hack so str() on unicode page content does not
        # raise UnicodeEncodeError deeper in the pipeline.
        reload(sys)
        sys.setdefaultencoding('utf-8')
        # Renamed from 'time' so the imported time module is not shadowed.
        timestamp = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
        filename = os.path.join('logTenedorHist/', timestamp + '.log')
        self.utils.logProcessInit(filename)

        self.utils.logProcessBegin(self.logLocalName)

        # Share the single logger instance with the collaborators.
        self.db.utils = self.utils
        self.tenedorUtils.utils = self.utils

        if self.db.connectDB():
            self.db.readHistURLs(self.start_urls)
        else:
            # CloseSpider is imported from scrapy.exceptions; previously the
            # name was undefined here and this path raised NameError instead.
            raise CloseSpider(reason='Critical Error, can not connect to DB')

    def make_requests_from_url(self, url):
        """Build the seed Request, attaching the DB restaurant id in meta.

        ``idRestSource`` may be None when the URL is unknown to the DB;
        parse() logs that case explicitly.
        """
        idRestSource = self.db.getRestFromUrl(url)
        # dont_filter: the same URL may legitimately be re-crawled.
        return Request(url, dont_filter=True,
                       meta={'idRestSource': idRestSource})

    def parse(self, response):
        """Parse a restaurant page and chain a request for its reviews.

        Deactivates the restaurant in the DB on 404/410 or when the site
        redirected to its home page (restaurant removed).
        """
        # Pre-bind so the except handler below can always reference it.
        restaurant = None
        try:
            hxs = HtmlXPathSelector(response)
            idRestSource = response.meta['idRestSource']
            if idRestSource is None:
                self.utils.logProcessDetail("ERROR DE PROGRAMACIÓN/BD NO SE PUEDE OBTENER EL CODIGO DE RESTAURANTE A PARTIR DE URL: "+str(response.url) , self.utils.ERROR)
                return

            # 404/410 means the restaurant page is gone.
            if response.status in self.handle_httpstatus_list:
                self.db.setRestaurantStatus(idRestSource, self.db.NACTIVE)
                return
            # A redirect to the bare home page also means "removed".
            if str(response.url).strip() == 'http://www.eltenedor.es/':
                self.db.setRestaurantStatus(idRestSource, self.db.NACTIVE)
                return

            restaurant = RestaurantItem()
            restaurant['idRestSource'] = idRestSource
            restaurant['url_scrapy'] = response.url

            self.numRestFound += 1
            self.utils.log("debug","num restaurants found: "+str(self.numRestFound),self.utils.DEBUG)

            self.tenedorUtils.fillRestaurant(response, hxs, restaurant)

            self.utils.log("debug","restaurant: "+str(restaurant),self.utils.DEBUG)

            ipage = 1
            # The site's numeric restaurant id is the last path segment of
            # the URL; it parameterises the reviews JSON endpoint below.
            po = str(response.url).rfind("/")
            # BUGFIX: rfind() returns -1 (not 1) when '/' is absent.
            if po != -1:
                try:
                    idRestTenedor = int(str(response.url)[po+1:])
                    self.utils.log("debug","idRestTenedor: " + str(idRestTenedor) ,self.utils.DEBUG)
                    # Endpoint returns all reservations (with or without a
                    # written comment), newest first, page 1.
                    url_end="%s/contentopiniones?uidRestaurant=%s&filters[is_special_offer]=ALL&filters[lunch_type]=ALL&filters[occasion]=ALL&filters[customer_gamification_level]=ALL&filters[with_comments_only]=0&sort=RESERVATION_DATE_DESC&page=1" %(idRestTenedor,idRestTenedor)
                    url_comments = urlparse.urljoin(response.url, url_end)
                    self.utils.log("debug","url_comments: " + str(url_comments) ,self.utils.DEBUG)
                    return Request(url_comments, callback=self.parse_comments,
                                   meta={'restaurant': restaurant, 'ipage': ipage})
                except Exception as e:
                    self.utils.logProcessDetail("Error: " +str(e)+ "Can not find restaurante code from url: " + str(response.url) +" can not look for posts" ,self.utils.ERROR)
            else:
                self.utils.logProcessDetail("id_source:" + str(restaurant['idRestSource']) + "Can not find restaurante code from url: " + str(response.url) +" can not look for posts" ,self.utils.ERROR)

        except Exception as e:
            self.utils.logProcessDetail("Error parsing restaurant en TENEDOR: " + str(restaurant) + "detail: " +str(e) ,self.utils.ERROR)

    def parse_comments(self, response):
        """Parse the JSON reviews endpoint and return the finished item.

        The endpoint wraps an HTML fragment in a JSON envelope under the
        "content" key; that fragment is re-parsed with an XPath selector.
        """
        # Pre-bind so the except handler can reference it even when the
        # meta lookup itself fails.
        restaurant = None
        try:
            restaurant = response.meta['restaurant']
            ipage = response.meta['ipage']

            myjsonResponse = json.loads(response.body)
            myjsonContent = str(myjsonResponse["content"])
            hxs = HtmlXPathSelector(text=myjsonContent)

            self.tenedorUtils.fillRestaurantComments(hxs, restaurant)

            return restaurant

        except Exception as e:
            self.utils.logProcessDetail("Error  en parse comments el tenedor: " + str(e) + " restaurant: " + str(restaurant),self.utils.ERROR)

    def close_spider(self):
        """Log the final restaurant count when the spider shuts down."""
        self.utils.logProcessEnd("Robot HIST - TENEDOR found: "+str(self.numRestFound), self.utils.DEBUG)