#coding=utf-8
import urllib2
import string
from models import Business, Deal, Comment, User
from datetime import *
import logging
import math

from BeautifulSoup import *
from googlemaps import GoogleMaps, GoogleMapsError
import Consts

def util_html_get(url):
    """Fetch *url* and return the response body, or '' on any failure.

    Best-effort helper: network/HTTP errors yield an empty string so
    callers can treat '' as "page unavailable".
    """
    try:
        html_opener = urllib2.build_opener()
        try:
            return html_opener.open(url).read()
        finally:
            #release the connection even when open()/read() raises
            html_opener.close()
    except (urllib2.URLError, IOError, ValueError):
        #bad URL scheme, DNS/connect failure, HTTP error, socket error
        return ''
    
    
def produce_result_xml(result):
    """Wrap *result* in the minimal XML envelope the API clients expect."""
    lines = [
        "<?xml version=\"1.0\" encoding=\"UTF-8\"?>",
        "<root>",
        "<result>{0}</result>".format(result),
        "</root>",
        "",  # keep the trailing newline of the original output
    ]
    return "\n".join(lines)


##############################################
#parse an xml of models and add or update them

##
#for business
#Business text properties copied verbatim from the XML onto the entity
_BUSINESS_TEXT_FIELDS = ("name", "country", "city", "street", "street_num",
                         "business_type", "website", "more_info",
                         "full_address", "latitude", "longitude")

def get_business_from_xml(business_xml, produce_new = True, update_changes = True):
    """Build or update a Business entity from an entity XML dump.

    business_xml   -- XML text containing an <entity kind="Business"> tag
    produce_new    -- when the XML carries no datastore key, create a new
                      Business (otherwise return None)
    update_changes -- when False, return the entity without copying fields

    Returns the Business, or None when no Business entity is found.
    """
    soup = BeautifulStoneSoup(business_xml)
    business_entity = soup.find(name = "entity", attrs = {"kind" : "Business"}, recursive = True)

    #if business not found
    if (business_entity is None):
        return None

    #attrs is a list of (name, value) pairs; index 1 is assumed to hold the
    #datastore key - TODO confirm against the XML producer
    keys = business_entity.attrs
    key_str = str(keys[1][1])
    if (key_str != ""):
        business = Business.get([key_str])[0]
    else:
        if (not produce_new):
            return None
        business = Business()

    #check if should update changes that done
    if (not update_changes):
        return business

    #parse xml - copy every present, non-empty text property
    business_soup = BeautifulStoneSoup(unicode(business_entity))
    for field in _BUSINESS_TEXT_FIELDS:
        prop = business_soup.find(name = "property", attrs = {"name" : field}, recursive = True)
        #find() returns None when the property tag is absent; the old code
        #crashed on len(None) in that case
        if (prop is not None and len(prop) != 0):
            setattr(business, field, unicode(prop.contents[0]))

    return business
##
#for deals
def get_deal_from_xml(deal_xml, produce_new = True, update_changes = True):
    """Build or update a Deal entity from an entity XML dump.

    deal_xml       -- XML text containing an <entity kind="Deal"> tag
    produce_new    -- when the XML carries no datastore key, create a new
                      Deal (otherwise return None)
    update_changes -- when False, return the entity without copying fields

    Returns the Deal, or None when no Deal entity is found.
    """
    if (deal_xml is None):
        return None
    soup = BeautifulStoneSoup(deal_xml)
    deal_entity = soup.find(name = "entity", attrs = {"kind" : "Deal"}, recursive = True)

    #if deal not found
    if (deal_entity is None):
        return None

    #attrs is a list of (name, value) pairs; index 1 is assumed to hold the
    #datastore key - TODO confirm against the XML producer
    keys = deal_entity.attrs
    key_str = unicode(keys[1][1])
    if (key_str != ""):
        deal = Deal.get([key_str])[0]
    else:
        if (not produce_new):
            return None
        deal = Deal()

    #check if should update changes that done
    if (not update_changes):
        return deal

    deal_soup = BeautifulStoneSoup(unicode(deal_entity))

    def _prop_text(field):
        #first content of the named property as unicode, or None when the
        #tag is absent/empty (the old code crashed on len(None) when absent)
        prop = deal_soup.find(name = "property", attrs = {"name" : field}, recursive = True)
        if (prop is None or len(prop) == 0):
            return None
        return unicode(prop.contents[0])

    #plain text fields are copied verbatim
    for field in ("name", "time", "time_delta", "deal_info"):
        value = _prop_text(field)
        if (value is not None):
            setattr(deal, field, value)

    days_bitmap = _prop_text("days_bitmap")
    if (days_bitmap is not None):
        deal.days_bitmap = int(days_bitmap)

    #an optional rating/cost vote is folded into the running sums
    to_rate = _prop_text("to_rate")
    if (to_rate is not None and int(to_rate) >= 0):
        deal.rate_sum = deal.rate_sum + int(to_rate)
        deal.rate_num = deal.rate_num + 1
    to_cost_level = _prop_text("to_cost_level")
    if (to_cost_level is not None and int(to_cost_level) >= 0):
        deal.cost_level_sum = deal.cost_level_sum + int(to_cost_level)
        deal.cost_level_num = deal.cost_level_num + 1

    return deal

##
#for comments
def get_comment_from_xml(comment_xml):
    """Look up an existing Comment from an entity XML dump.

    Returns the stored Comment when the XML carries a non-empty datastore
    key, otherwise None (comments are never created here).
    """
    parsed = BeautifulStoneSoup(comment_xml)
    entity = parsed.find(name = "entity", attrs = {"kind" : "Comment"}, recursive = True)
    if (entity is None):
        return None

    #second attribute pair holds the datastore key
    key_value = unicode(entity.attrs[1][1])
    if (key_value == ""):
        return None
    return Comment.get([key_value])[0]
    
##
#if deal_delta_time=0 ignores time, if deal time = "all" "" or None ignores
def is_deal_in_datetime(deal, query_days_bitmap, query_time, query_delta_time):
    """Return True when *deal* matches the queried days/time window.

    query_days_bitmap -- weekday bitmask as a decimal string ("127" = all
                         days); None/"" means "no day filter" and matches
    query_time        -- "HH:MM" start of the queried window
    query_delta_time  -- window length in hours as a string; None/""/"all"
                         means "any time of day"
    """
    #unwanted query - 127 for all days
    if (query_days_bitmap is None or query_days_bitmap == ""):
        return True

    #filter by days_bitmap
    if ((deal.days_bitmap & int(query_days_bitmap)) == 0):
        return False

    #unwanted query
    if (query_delta_time is None or query_delta_time == "" or query_delta_time == "all"):
        return True

    #a delta without a start time is an invalid query; the old code crashed
    #on None.split here - treat it as "any time"
    if (query_time is None or query_time == ""):
        return True

    #filter by time
    query_time_lst = query_time.split(":")
    query_time_sum = int(query_time_lst[0]) * 60 + int(query_time_lst[1])

    #validate deal.time
    if (deal.time is None or deal.time == ""):
        logging.critical("ERROR: deal with no time")
        return False

    #all times deal
    if (deal.time == "all"):
        return True

    deal_time_lst = deal.time.split(":")
    deal_time_sum = int(deal_time_lst[0]) * 60 + int(deal_time_lst[1])

    #overlap test between [query, query+delta] and [deal, deal+delta],
    #both expressed in minutes since midnight
    return ((query_time_sum + int(query_delta_time) * 60 >= deal_time_sum) and
            (query_time_sum <= deal_time_sum + int(deal.time_delta) * 60))
    
##
#remove '-' ' ' '_' and to lower case
def fix_word(str):
    """Normalize a word for fuzzy matching: drop ' ', '-', '_', lowercase.

    NOTE: the parameter shadows the ``str`` builtin; the name is kept so
    keyword callers are not broken.
    """
    #str methods replace the deprecated string-module functions
    #(string.replace/string.lower were removed in Python 3)
    return str.replace(" ", "").replace("-", "").replace("_", "").lower()


def get_exist_business(business):
    """Find a stored Business matching *business*, or None.

    Two lookups are tried in order: name + full_address, then name plus
    the individual address components.
    """
    by_address = Business.all()
    by_address.filter("name == ", business.name)
    by_address.filter("full_address == ", business.full_address)
    if (by_address.count(limit=1) != 0):
        return by_address.get()

    by_components = Business.all()
    by_components.filter("name == ", business.name)
    by_components.filter("country ==", business.country)
    by_components.filter("city ==", business.city)
    by_components.filter("street ==", business.street)
    by_components.filter("street_num ==", business.street_num)
    if (by_components.count(limit=1) == 0):
        return None
    return by_components.get()


def get_exist_deal(deal):
    """Return the stored Deal matching business/name/days, or None."""
    query = Deal.all()
    for prop, value in (("business_item == ", deal.business_item),
                        ("name == ", deal.name),
                        ("days_bitmap ==", deal.days_bitmap)):
        query.filter(prop, value)
    return query.get()

##
#add a deal and a business to data base
#checks if exists and renew if needed
def add_business_and_deal_to_db(business, deal, is_deal_lst = False, lang="en"):
    """Persist *business* and its deal(s), de-duplicating against the store.

    business    -- Business entity to store (or match against a stored one)
    deal        -- a single Deal, or a list of Deals when is_deal_lst is True
    is_deal_lst -- treat *deal* as a list
    lang        -- language hint forwarded to the geocoder

    New businesses get a scraped picture link and geocoded coordinates;
    existing ones only have their _dateAdded refreshed. Each deal is matched
    against stored deals and renewed in place when already known.
    """
    if (deal == None):
        return
    if (is_deal_lst and len(deal) == 0):
        return
    
    #search for exist business
    old_business = get_exist_business(business)
    if (old_business == None):
        #find business link_pic: scrape the website for the largest image
        #or the first one whose URL mentions "logo"
        if (business.website != None and business.website != ""):
            website_html = util_html_get(business.website)
            website_soup = BeautifulSoup(website_html)
            img_lst = website_soup.findAll(name = "img", recursive = True)
            #default
            img_logo = ""
            max_img = ""
            max_img_len = 0
            
            for img_total in img_lst:
                try:
                    img = img_total['src']
                    logging.debug(unicode(img_total))
                except:
                    #img tag without a src attribute - skip it
                    continue
                try:
                    #resolve relative src paths against the site's real
                    #(post-redirect) URL before probing the image size
                    html_opener = urllib2.build_opener()
                    real_url = html_opener.open(business.website).geturl()
                    if (real_url != None and real_url != ""):
                        if (img.startswith("..")):
                            #NOTE(review): lstrip("..") strips ALL leading '.'
                            #characters, not the ".." prefix - verify this
                            #relative-path handling
                            img = real_url.rpartition("/")[0].rpartition("/")[0] + "/" + img.lstrip("..")                                  
                        elif (img.startswith(".") or img.startswith("/")):
                            img = real_url.rpartition("/")[0] + "/" + img.lstrip(".")
                    html_img = html_opener.open(img).info()        
                    #NOTE(review): getaddr() parses address headers;
                    #getheader("Content-Length") looks intended - as written
                    #this likely raises and img_len falls back to 0 below
                    img_len = int(html_img.getaddr("Content-Length")[1])
                    logging.debug("src = " + unicode(img) + "Content-Length = " + unicode(img_len))
                except:
                    img_len = 0

                    
                #track the biggest image and the first "logo" image seen
                if (img_len > max_img_len):
                    max_img = img
                    max_img_len = img_len
                if ("logo" in img.lower() and img_logo == ""):
                    img_logo = img
                    
            #prefer a big (>5KB) image; fall back to the logo candidate
            if (max_img_len > 5000):
                business.pic_link = max_img
            elif (img_logo != ""):
                business.pic_link = img_logo
            
            #workaround for redirection and '..' symbol
            if (business.pic_link != None):
                try:
                    html_opener = urllib2.build_opener()
                    real_url = html_opener.open(business.website).geturl()
                except:
                    logging.debug("failed to find url")
                    business.pic_link = ""
                    real_url = ""
                    
                logging.debug("real_url = " + unicode(real_url))
                if (real_url != ""):
                    if (business.pic_link.startswith("..")):
                        #NOTE(review): same lstrip("..") prefix concern as above
                        business.pic_link = real_url.rpartition("/")[0].rpartition("/")[0] + "/" + business.pic_link.lstrip("..")                                  
                    elif (not business.pic_link.startswith("http")):
                        business.pic_link = real_url.rpartition("/")[0] + "/" + business.pic_link.lstrip(".") 
                logging.debug("pic_link = " + unicode(business.pic_link))
                
        #add new business to data base
        business.put()
        #find longitude and latitude. if not find delete it.
        lat_lon_lookup(business, lang)
        if (deal != None):
            #normalize to a list so both call styles share one code path
            if (not is_deal_lst):
                deal_lst = list()
                deal_lst.append(deal)
            else:
                deal_lst = deal
            for deal_elem in deal_lst:
                #find deal type
                deal_type_finder(business, deal_elem)
                #add deal
                deal_elem.business_item = business
                #default deal rating for auto parsing
                deal_elem.rate_num = deal_elem.rate_num + 1 
                deal_elem.rate_sum = deal_elem.rate_sum + 5
                deal_elem.put()
    else:
        #business already stored - just refresh its added date
        old_business._dateAdded = business._dateAdded
        old_business.save()
        if (deal != None):
            if (not is_deal_lst):
                deal_lst = list()
                deal_lst.append(deal)
            else:
                deal_lst = deal
            #update business_item
            for deal_elem in deal_lst:
                deal_elem.business_item = old_business
                #search for exist deal
                old_deal = get_exist_deal(deal_elem)
                if (old_deal != None):
                    #known deal - renew its mutable fields in place
                    deal_renew(old_deal, deal_elem)
                else:
                    deal_elem.business_item = old_business
                    deal_elem.put()


def deal_renew(old_deal, deal):
    """Refresh a stored deal's mutable fields from a freshly parsed one."""
    for attr in ("deal_info", "days_bitmap", "time", "time_delta"):
        setattr(old_deal, attr, getattr(deal, attr))
    old_deal.save()

##
#validate user exist and has a credit and add -1 to his credit
#return False if has no credit
#create new user if dont exist
def validate_user_credit(user_name, email):
    """Charge one credit from the user identified by *email*.

    An unseen email creates a new User with 5 credits. Returns False when
    the email is missing or the user has no credit left, True otherwise.
    """
    #TODO: enable feature
    #return True

    if (email is None or email == ""):
        return False

    query = User.all()
    query.filter("email ==", email)
    user = query.get()

    if (user is None):
        #first time we see this email - start with the signup credit
        user = User()
        user.credit = 5
        user.user_name = user_name
        user.email = email
        user.likes = ""

    if (user.credit == 0):
        return False

    user.credit = user.credit - 1
    user.put()
    return True
    

##
##find longitude and latitude. if not find delete it.
#NOTE(review): API key hardcoded in source control - move it to
#configuration/environment and rotate the key.
PRIVATE_KEY = "mGg0sxUHWDL-eYYpvlf-j8AF"
#module-level geocoding client shared by lat_lon_lookup below
gmaps = GoogleMaps(PRIVATE_KEY)
def lat_lon_lookup(business, lang):
    """Geocode business.full_address into business.latitude/longitude.

    Coordinates are stored as unicode strings of micro-degrees
    (degrees * 10^6). On geocoding failure the business is marked with
    _delete = 1 and saved. Does nothing when both coordinates are set.
    """
    if (business.latitude is not None and business.latitude != "" and
        business.longitude is not None and business.longitude != ""):
        return

    logging.info("calc business")

    address = business.full_address
    try:
        lat, lng = gmaps.address_to_latlng(address, lang)
    except GoogleMapsError:
        #mark as a tombstone so the caller's data set drops it
        business._delete = 1
        business.save()
        logging.info("did not find business geopoint, deleting business")
        logging.debug(business.to_xml())
        return

    #store as micro-degrees in string form
    business.latitude = unicode(int(lat * 1000000))
    business.longitude = unicode(int(lng * 1000000))
    #defensive re-check kept from the original code (unicode(int(...)) is
    #never empty in practice)
    if (business.latitude is None or business.latitude == "" or
        business.longitude is None or business.longitude == ""):
        business._delete = 1
        business.save()
        logging.info("did not find business, deleting business")
        logging.debug(business.to_xml())
    else:
        logging.info("found geopoint")
        business.save()


###
#parse deal_info and find deal type
#default type is business type
def deal_type_finder(business, deal):
    """Classify deal.deal_type by scanning its name/info for hint words.

    First match across Consts.DEAL_TYPES_ALL wins; when nothing matches,
    a default type is derived from the business type.
    """
    logging.info("Start deal type finder")
    #already classified once - keep the existing value
    if (deal.deal_type != None):
        return
    deal.deal_type = ""

    #texts worth scanning (lowercased per check, as the hints expect)
    for deal_type in Consts.DEAL_TYPES_ALL:
        type_name = deal_type[0]
        type_hints = deal_type[1]

        matched = False
        for hint in type_hints:
            in_info = (deal.deal_info != None and deal.deal_info.lower().find(hint) != -1)
            in_name = (deal.name != None and deal.name.lower().find(hint) != -1)
            if (in_info or in_name):
                logging.debug("found deal_type = " + type_name)
                deal.deal_type = type_name
                matched = True
                break
        if (matched):
            break

    #default value for type if no hint matched
    if (deal.deal_type == None or deal.deal_type == ""):
        if (business.business_type == Consts.REST[1][0]):
            deal.deal_type = Consts.REST[0]
        elif (business.business_type == Consts.PUB[1][0]):
            deal.deal_type = Consts.PUB[0]
        else:
            deal.deal_type = Consts.PARTY[0]

    logging.info("Finish deal type finder")
    
def get_type_lst_from_type_btm(query_type):
    """Expand a business-type bitmask string into matching type names.

    query_type -- decimal bitmask as a string; None/""/"all" selects every
                  type (mask 0xFFFFFFFF).

    Returns a list of business-type display strings from
    Consts.BUSINESS_TYPES_ALL, in declaration order.
    """
    if (query_type != "all" and query_type != "" and query_type is not None):
        #int() replaces string.atoi, which was removed in Python 3
        query_type_btm = int(query_type)
    else:
        query_type_btm = 0xFFFFFFFF

    #keep every type whose bit is present in the mask
    return [business_type[1] for business_type in Consts.BUSINESS_TYPES_ALL
            if (business_type[0] & query_type_btm) != 0]


