#coding=utf-8
from google.appengine.ext import db
from models import *
from google.appengine.ext.db import GqlQuery
from BeautifulSoup import BeautifulSoup

import util
import logging

#restaurants in tel-aviv (2eat)
URL_RESTS_IN_ISRAEL = "http://www.restaurants-in-israel.co.il/SearchResults.aspx?area=1&city=1"
URL_2EAT_BARS = "http://www.2eat.co.il/searchrestaurants.aspx?RLoc=9&RSub=1&RType=33"

def analyze_rest_in_israel(next_url):
    """Crawl the restaurants-in-israel.co.il Tel-Aviv search results.

    Walks result pages starting at ``next_url.next_url``, extracts one
    Business (plus an optional coupon Deal) per table row and stores them
    via util.add_business_and_deal_to_db.

    next_url -- datastore entity with a ``next_url`` string property; it is
                updated (put) after each page and deleted when crawling ends.
    """
    logging.info("Start parse rests in Israel data base")
    html = util.util_html_get(next_url.next_url)

    # loop for each result page
    while html != None:

        # parsing
        html_soup = BeautifulSoup(html)

        # get wanted table
        result_table = html_soup.find(name = "table", attrs = {"title" : "Restaurants Search Resalut"}, recursive = True)

        # validate result
        if (result_table == None):
            logging.info("did not found data base table")
            return

        # BUGFIX: scope the row search to the located table (was re-parsing
        # the whole page via str(html_soup)), consistent with analyze_2eat_bars.
        table_soup = BeautifulSoup(str(result_table))

        # get restaurants: one <tr class="resaultsTr"> per restaurant
        results_col = table_soup.findAll(name = "tr", attrs = {"class" : "resaultsTr"}, recursive = True)

        for col in results_col:

            col_soup = BeautifulSoup(str(col))

            # getting the cells of the row
            result_cell = col_soup.findAll(name = "td", attrs = {"class" : "resaultsTd"}, recursive = True)

            # Cell layout:
            # 0 - name
            # 1 - more details
            # 2 - address
            # 3 - phone
            # 4 - coupon
            # 5 - reservation
            # 6 - site

            # create new business
            business = Business()
            business._source_type = "auto"
            business._source = URL_RESTS_IN_ISRAEL

            # BUGFIX: deal was unbound when the coupon cell never matched;
            # default to "no deal" before scanning the row
            deal = None

            # walk the cells back-to-front, popping from the end of the list
            for cell_index in reversed(range(7)):
                cell = result_cell.pop()
                if (cell_index == 0):
                    business.name = cell.getText()
                    # crude classification by name keywords
                    if (business.name.lower().find("bar") != -1 or business.name.lower().find("pub") != -1):
                        business.business_type = "pub"
                    else:
                        business.business_type = "rest"

                elif (cell_index == 2):
                    business.full_address = cell.getText(" ").strip("\n")
                    business.country = "Israel"
                    business.city = "Tel Aviv"
                elif (cell_index == 4):

                    # get coupon url if exists
                    coupon_soup = BeautifulSoup(str(cell))
                    result = coupon_soup.find(name = "a", recursive = True)
                    if (result == None):
                        deal = None
                    else:
                        # attrs is a BS3 list of (name, value) tuples; [1][1]
                        # is the href value -- TODO confirm attribute order
                        coupon_url = "http://www.restaurants-in-israel.co.il/" + result.attrs[1][1]
                        # parse rests in Israel coupon
                        deal = parse_rests_in_Israel_coupon(coupon_url)

                elif (cell_index == 6):
                    # find website
                    coupon_soup = BeautifulSoup(str(cell))
                    result = coupon_soup.find(name = "a", recursive = True)
                    if (result == None):
                        continue
                    else:
                        business.website = result.attrs[2][1]

            # add to data base
            util.add_business_and_deal_to_db(business, deal)

        # find next page
        result_next_page = html_soup.find(name = "a", attrs = {"title" : "Next Page"}, recursive = True)
        if (result_next_page == None or result_next_page.attrs == None):
            html = None
            db.delete(next_url)
            logging.info("Finish parse rests in Israel data base")
        else:
            url_suffix = result_next_page.attrs[1][1]
            if (url_suffix == None):
                db.delete(next_url)
                html = None
                logging.info("did not found next page")
            else:
                next_url.next_url = "http://www.restaurants-in-israel.co.il/" + url_suffix
                next_url.put()
                logging.debug("next url page to parse = " + unicode(next_url.next_url))
                # BUGFIX: fetch moved inside this else -- previously the same
                # page was re-fetched even after html was set to None above,
                # which made the loop never terminate on a missing suffix.
                html = util.util_html_get(next_url.next_url)

def analyze_2eat_bars(next_url):
    """Crawl the 2eat.co.il bar search results for Tel-Aviv.

    Walks result pages starting at ``next_url.next_url``, extracts one
    Business (plus an optional list of coupon Deals) per table row and
    stores them via util.add_business_and_deal_to_db.

    next_url -- datastore entity with a ``next_url`` string property; it is
                updated (put) after each page and deleted when crawling ends.
    """
    logging.info("Start parse 2eat bars data base")
    html = util.util_html_get(next_url.next_url)

    # loop for each result page
    while html != None:

        # parsing
        html_soup = BeautifulSoup(html)

        # find next page (link titled "more restaurants" in Hebrew)
        result_next_page = html_soup.find(name = "a", attrs = {"title" : u'עוד מסעדות'}, recursive = True)
        if (result_next_page == None or result_next_page.attrs == None):
            html = None
            db.delete(next_url)
        else:
            url_suffix = result_next_page.attrs[2][1]
            if (url_suffix == None):
                db.delete(next_url)
                html = None
            else:
                next_url.next_url = "http://www.2eat.co.il/" + url_suffix
                next_url.put()
                logging.debug("next url page to parse = " + unicode(next_url.next_url))
                # BUGFIX: fetch moved inside this else -- previously the same
                # page was re-fetched even after html was set to None above,
                # which made the loop never terminate on a missing suffix.
                html = util.util_html_get(next_url.next_url)

        # get wanted table
        result_table = html_soup.find(name = "table", attrs = {"id" : "ctl00_ContentPlaceHolder1_Searchresaultstable1_gvResults"}, recursive = True)

        # validate result; skip this page if the results table is missing
        if (result_table == None):
            continue

        table_soup = BeautifulSoup(str(result_table))

        # get restaurants (every <tr> of the results table)
        results_col = table_soup.findAll(name = "tr", recursive = True)

        for col in results_col:

            col_soup = BeautifulSoup(str(col))

            # separator/header rows use a single colspan=7 cell -- skip them
            result = col_soup.find("td", attrs = {"colspan": "7"}, recursive = True)
            if (result != None): continue

            # getting the cells of the row
            result_cell = col_soup.findAll("td", recursive = True)
            if (result_cell == None or len(result_cell) == 0): continue

            # Cell layout:
            # 0 - -----
            # 1 - coupons
            # 2 - website
            # 3 - name + address
            # 4 - address
            # 5 - type
            # 6 - -----

            # create new business
            business = Business()
            business._source_type = "auto"
            business._source = URL_2EAT_BARS

            # BUGFIX: deal_lst was unbound when the coupon cell (index 1)
            # was absent from a short row; default to "no deals"
            deal_lst = None

            # walk the cells back-to-front, popping from the end of the list
            for cell_index in reversed(range(len(result_cell))):
                cell = result_cell.pop()
                if (cell_index == 3):
                    business.name = BeautifulSoup(unicode(cell)).find("a").getText().replace("\n", "")
                    business.business_type = "pub"
                    business.full_address = BeautifulSoup(unicode(cell)).getText("#").split("#")[4]
                elif (cell_index == 4):
                    business.country = "Israel"
                    business.city = "Tel Aviv"
                elif (cell_index == 1):

                    # get coupon url if exists
                    coupon_soup = BeautifulSoup(unicode(cell))
                    result = coupon_soup.find(name = "a", recursive = True)
                    if (result == None):
                        deal_lst = None
                    else:
                        logging.debug("coupon url = " + result.attrs[2][1])
                        coupon_url = unicode(result.attrs[2][1])
                        # parse 2eat coupon
                        deal_lst = parse_2eat_coupon_bars(coupon_url)

                elif (cell_index == 2):
                    # find website
                    coupon_soup = BeautifulSoup(unicode(cell))
                    result = coupon_soup.find(name = "a", recursive = True)
                    if (result == None):
                        continue
                    else:
                        business.website = result.attrs[2][1]

            # add to data base
            # BUGFIX: guard the append -- full_address is None (TypeError on
            # +=) when the name/address cell was missing from the row
            if (business.full_address == None):
                business.full_address = u' תל אביב, ישראל '
            else:
                business.full_address += u' תל אביב, ישראל '
            util.add_business_and_deal_to_db(business, deal_lst, is_deal_lst=True, lang="he")

            
def parse_rests_in_Israel_coupon(url):
    """Fetch a restaurants-in-israel.co.il coupon page and build a Deal.

    url -- absolute URL of the coupon page (also recorded as the source).
    Returns a Deal, or None when the page has no coupon headline span.
    """
    page_soup = BeautifulSoup(util.util_html_get(url))

    # the coupon headline lives in a span with a fixed ASP.NET control id
    headline = page_soup.find(name = "span", attrs = {"id" : "fvCoupon_ctl00_Label2"}, recursive = True)
    if headline is None:
        return None

    deal = Deal()
    deal.time = "all"
    deal.time_delta = "all"
    deal._source_type = "auto"
    deal._source = url

    # '|' is stripped because it is used as a field separator downstream
    # -- presumably; TODO confirm against add_business_and_deal_to_db
    deal.name = headline.getText().replace('|', '')
    deal.deal_info = deal.name

    # optional fine-print cell below the headline
    extra_info = page_soup.find(name = "td", attrs = {"id" : "bottomID132"}, recursive = True)
    if extra_info is not None:
        deal.deal_info += "\n" + extra_info.getText().replace('|', '')

    return deal

def parse_2eat_coupon_bars(url):
    """Fetch a 2eat.co.il coupon page and build a list of Deals.

    url -- absolute URL of the coupon page (also recorded as the source).
    Returns a list of Deal objects, or None when no coupon tables exist.
    """
    # parsing
    html_soup = BeautifulSoup(util.util_html_get(url))

    # each coupon sits in its own cellpadding="0" table
    coupon_results = html_soup.findAll(name = "table", attrs = {"cellpadding" : "0"}, recursive = True)

    # BUGFIX: findAll returns a list, never None, so the old `== None` check
    # was dead code; treat "no coupon tables" as None, matching the caller's
    # existing no-coupon path
    if not coupon_results:
        return None

    deal_lst = list()
    for coupon_result in coupon_results:

        deal = Deal()
        deal.time = "all"
        deal.time_delta = "all"
        deal.deal_info = ""
        deal._source_type = "auto"
        deal._source = url

        deal_soup = BeautifulSoup(unicode(coupon_result))

        # headline text is in a <big> element; '|' is stripped because it is
        # presumably a downstream field separator -- TODO confirm
        info = deal_soup.find(name = "big", recursive = True)
        if (info != None):
            deal.name = info.getText().replace('|', '')
            deal.deal_info += deal.name

        # optional fine print in the coupon footer cell
        more_info = deal_soup.find(name = "td", attrs = {"class" : "CopponBottom"}, recursive = True)
        if (more_info != None):
            deal.deal_info += "\n" + more_info.getText().replace('|', '')
        deal_lst.append(deal)

    return deal_lst