#
#	Helpers for geocoding a location (Google Maps API) and collecting
#	nearby geotagged tweets (Twitter search API v1) on Google App Engine.
#

from google.appengine.ext import webapp
from google.appengine.ext.webapp import util
from django.utils import simplejson
from urllib2 import *
from google.appengine.ext import db

#import json
import header
import footer
import time
import datetime

class Tweet_model(db.Model):
	# Datastore entity for one geotagged tweet pulled from the Twitter
	# search API. Field names mirror the JSON keys of a search result.
	id_str = db.StringProperty()                   # tweet author's screen name ("from_user")
	place = db.StringProperty()                    # always stored as the string "None" (see stalk_NG)
	geo = db.StringProperty()                      # raw repr of the tweet's "geo" dict
	in_reply_to_user_id_str = db.StringProperty()  # always stored as the string "None" (see stalk_NG)
	coordinates = db.StringProperty()              # repr of geo['coordinates'], e.g. "[30.6, -96.3]"
	created_at = db.DateTimeProperty()             # tweet timestamp (parsed from the API's UTC string)
	retweet_count = db.IntegerProperty()
	retweeted = db.BooleanProperty()
	id_long = db.IntegerProperty()                 # numeric tweet id
	text = db.StringProperty()                     # tweet body, truncated to 500 chars (StringProperty limit)

class Functions(webapp.RequestHandler):

#use this to get the lat and lon
# http://maps.googleapis.com/maps/api/geocode/json?address=northgate+college+station,tx&sensor=false
#use this to get the tweeets from a given Lat,Lon also make sure you check all of the pages. Up to about page 15
# http://search.twitter.com/search.json?&rpp=100&page=1&geocode=30.615995,-96.351321,5mi&include_entities=true&result_type=recent

  def get_rate_limit(self): # returns only remaining hits
    url = "http://api.twitter.com/1/account/rate_limit_status.json"
    f = urlopen(url)
    rate_limit_status = simplejson.loads(f.read())
    f.close()
    return rate_limit_status['remaining_hits']
    
  def get_rate_limits(self): #prints out all stats
    url = "http://api.twitter.com/1/account/rate_limit_status.json"
    f = urlopen(url)
    rate_limit_status = simplejson.loads(f.read())
    f.close()
    return rate_limit_status

  def getLatLon(self,locationstring): #gets the lat lon based on a location EX northgate+college+station,tx = Lat,lon
    latlon = ""
    #google maps api to get lat,lon based on location
    url = 'http://maps.googleapis.com/maps/api/geocode/json?address=%s&sensor=false' % (locationstring)

    f = urlopen(url)
    while not f:
        f = urlopen(url)
        print "bad"
    # Transfer the data to JSON format
    results = simplejson.loads(f.read())
    print results
    for each in results['results']:
        latlon += str(each['geometry']['location']['lat']) + "," + str(each['geometry']['location']['lng'])
 
        break          
    f.close()

    return latlon
  
  #this functions gets all of the markers from tweets
  def getMarkers(self,locationstring):
    markers = ""
    basemarker = "&markers="
    
    #gets the lat,lon from the location string
    latlon = self.getLatLon(locationstring)
    
    #this for loop goes through the pages, there is a max of 15 pages per request
    for i in range(2): #change to 15 once we are sure it works. Set on 3 now so we dont waste requests 
 
        url = 'http://search.twitter.com/search.json?&rpp=20&page=%d&geocode=%s,5mi&include_entities=true&result_type=recent' %(i+1,latlon)
        if self.get_rate_limit() < 2:
            return markers
            
        f = urlopen(url)
 
        if not f:
            return markers
            
        # Transfer the data to JSON format
        tweets = simplejson.loads(f.read())
        for each in tweets['results']:
            #check if the tweet has a lat,lon and if so add it to the markers string        
            if each['geo'] is not None:
                tempa = str(each['geo']['coordinates'])
                temp= tempa.replace(" ",'')
                temp= temp.replace("]",'')
                temp= temp.replace("[",'')
                markers += basemarker + temp
                if len(markers) > 1850: # the URL can only be a certian length. If its too long then it doesnt work
                    return markers
        f.close()
        if 'next_page' not in tweets: # if there is no next page, then return what we have
            print "THE END OF ALL TIME"
            return markers
            
  
    return markers
  
  def stalk_NG(self):
    try:
     t_database = db.GqlQuery("SELECT * FROM Tweet_model limit 50" )
    except:
     pass
    locationstring = "northgate+college+station,+tx"
    #gets the lat,lon from the location string
    latlon = self.getLatLon(locationstring)
    
    for i in range(15): #change to 15 once we are sure it works. Set on 3 now so we dont waste requests 
 
        url = 'http://search.twitter.com/search.json?&rpp=100&page=%d&geocode=%s,5mi&include_entities=true&result_type=recent' %(i+1,latlon)
        if self.get_rate_limit() < 2:
            return "No more requests - " + str(self.get_rate_limits())
        f = urlopen(url)
 
        if not f:
            return "Fail to open - " + str(self.get_rate_limits())
            
        # Transfer the data to JSON format
        tweets = simplejson.loads(f.read())
        for p in tweets['results']:
            #check if the tweet has a lat,lon and if so add it to the markers string        
            if p['geo'] is not None:
                m = Tweet_model()
                
                duplicate = False
                m.id_str = str(p["from_user"])
                m.place = str("None")
                m.geo = str(p["geo"])
                m.in_reply_to_user_id_str = str("None")
                m.coordinates = str(p['geo']['coordinates'])
                
                raw_date = str(p["created_at"])
                raw_date = raw_date.replace('\"','')
                extracted_date = (time.strptime(raw_date,"%a, %d %b %Y %H:%M:%S +0000"))
                
                m.created_at = datetime.datetime(extracted_date.tm_year,
                extracted_date.tm_mon,
                extracted_date.tm_mday,
                extracted_date.tm_hour,
                extracted_date.tm_min,
                extracted_date.tm_sec
                )
                m.retweet_count = 0
                m.retweeted = bool("False")
                m.id_long = long((p["id"]))
                try:
                    m.text = str(p["text"])
                except:
                    m.text = "Error"
                m.text = m.text[:500]
                
                try:
                    for _t in t_database:
                        if(m.text == _t.text):
                            duplicate = True
                        if(not duplicate):
                            m.put()
                except:
                    m.put() 
                
        f.close()
        if 'next_page' not in tweets: # if there is no next page, then return what we have
            print "THE END OF ALL TIME"
            return "No next page  - " + str(self.get_rate_limits())
 
      
  def makeMap(self,zoom,location,city):
    string = '<img src="http://maps.googleapis.com/maps/api/staticmap?center='
    location = location.replace(" ","+")
    location = location.replace(",","+")
    string += location + "+"
    city = city.replace(" ", "+")
    string += city
    string += '&zoom=' + str(zoom)
    string += '&size=512x512&maptype=roadmap'
    locationstring = location +"+"+city
    
    #add markers
    string += self.getMarkers(locationstring)
    string += '&sensor=false" width="463" height="446" alt="" class="fl" />'
    
    #string += '<p><br><br> Remaining Hits for Twitter = %d </p> ' % (self.get_rate_limit()) # delete this line later
    string += '<p><br><br> Remaining Hits for Twitter = %s </p> ' % (self.get_rate_limits()) # delete this line later
    return string
    
  

# NOTE(review): main() is not defined in this module, and the star-import
# from urllib2 does not supply one -- running this file directly raises
# NameError. Presumably the WSGI main() was meant to live here or be
# imported; confirm before relying on this entry point.
if __name__ == '__main__':
    main()