# Scratch script for exercising the legacy Twitter v1 search and
# rate-limit-status APIs (prints geotagged tweet coordinates).

import json
import time

from urllib2 import *



# Crawl a Twitter user's followers IDs given the user's ID
def get_follower_ids():

      # The API to get user profile
      url = 'http://search.twitter.com/search.json?&rpp=100&page=1&geocode=30.615995,-96.351321,5mi&include_entities=true&result_type=recent'

      f = urlopen(url)
      while not f:
          f = urlopen(url)
          print "bad"
      # Transfer the data to JSON format
      tweets = json.loads(f.read())
      print tweets
      for each in tweets['results']:
          
          if each['geo'] is not None:
              tempa = str(each['geo']['coordinates'])
              temp= tempa.replace(" ",'')
              temp= temp.replace("]",'')
              temp= temp.replace("[",'')
              print temp 
      f.close()
      return tweets


# Show current status of rate limit
# Show current status of rate limit
def get_rate_limit():
    """Fetch the caller's Twitter API rate-limit status.

    Returns the decoded JSON dict on success, or [] on failure
    (best-effort: prints a warning and sleeps 10s instead of raising).
    """
    url = "http://api.twitter.com/1/account/rate_limit_status.json"
    try:
        f = urlopen(url)
        try:
            rate_limit_status = json.loads(f.read())
        finally:
            f.close()  # close the response even if the body is not valid JSON
        return rate_limit_status
    except (IOError, ValueError):
        # IOError covers urllib2.URLError (network/HTTP failure);
        # ValueError covers a malformed JSON body. The old bare `except:`
        # swallowed everything — including the NameError from the missing
        # `time` import, now fixed at module level.
        print "Error getting rate limit, Sleeping for 10 sec then moving on"
        time.sleep(10)
        return []

def main():
    

    print '\n\n------------------------------------\n\n'
    rate_limit = get_rate_limit()
    print rate_limit
    follower_ids = get_follower_ids()
    


# Run the crawler only when executed as a script, not on import.
if __name__ == '__main__':
    main()
