import tweepy
import os
from optparse import OptionParser
import sys
from urllib import urlencode
from numpy import *
from numpy import matrix
from numpy import linalg
import re
import locale
import simplejson
import time
import pdb
import urllib2
import signal
from mailer import Mailer
from mailer import Message
 
# This script is for use in phoenix.tudelft.net

class TimeoutException(Exception): 
	"""Raised by the SIGALRM handler installed by the `timeout` decorator
	when a wrapped call exceeds its time budget."""
	pass

class RecommendedFollowers:
	
	def __init__(self, inputfilepath, outputfilepath, screen_name):
		"""Set up four authenticated Twitter API clients, the bookkeeping
		lists/counters used by the pipeline, and the TweetUM entity set,
		then immediately run the whole pipeline via self.Process().

		inputfilepath  -- Mahout recommendations file read by RecommendedFollowers()
		outputfilepath -- base path for all .out/.info/.logs output files
		screen_name    -- root Twitter user whose recommended-follower network is processed
		"""
		self.inputfile = inputfilepath
		self.outputfile = outputfilepath
		self.root_screen_name = screen_name
	
		# SECURITY NOTE(review): OAuth consumer keys/secrets and access tokens
		# for all four accounts are hard-coded below. They should be revoked and
		# moved to a config file or environment variables, not kept in source.

		# Account 0 (Imp22Fol)
		#Authentication tokens for Twitter, authenticate, and check rate limit status
		self.auth0 = tweepy.OAuthHandler("i6vYqkDgmUZx2SGBnguzg", "VhFKWet5p6JmWKX89fITvDpl0praa0TO8V7ba0QSl5U")
		self.auth0.set_access_token("335838961-tYqkaP65QyYHiHb5cE1CGV6zB7SIpIQArMqybs", "eHK9NyCX3TtuvKJBKVs3DIKr2EMfh3x508Z3UT56EJE")
		self.api0 = tweepy.API(self.auth0)
		self.my_rate_limit_status0 = self.api0.rate_limit_status()

		# Account 1 (Conv2Fol)
		# Authentication tokens for Twitter, authenticate, and check rate limit status
		self.auth1 = tweepy.OAuthHandler("uEaf4MBF9pu6ePTJT0T3w", "uTwMUJGo6dqciEJ1vVrCJ3wgHrSYR9Vysz2TJ01GLyA")
		self.auth1.set_access_token("335841614-072kBBbxiWHcezvN3Eo9VmMzFnGbehUrqWMXp0Tu", "03jqfVjGPXe219hJHNFBlkKK96igdsvyoFiitnuoJ8")
		self.api1 = tweepy.API(self.auth1)
		self.my_rate_limit_status1 = self.api1.rate_limit_status()
		
		# Account 2 (Int2Fol)
		# Authentication tokens for Twitter, authenticate, and check rate limit status
		self.auth2 = tweepy.OAuthHandler("P07XwnJHUN3dqQxG4zUpw", "fJVg6q79jdtaPNvh2tc1ipv8x9lEiTcwzRf3C24nFQ")
		self.auth2.set_access_token("335844930-HmUJwXcIYbyhvFZE0xCuOZbAIvWjR2gEGV83AxsK", "MTJ2GOv02777TreJkawARTr2YQSMb3UoEjPPCefnGPA")
		self.api2 = tweepy.API(self.auth2)
		self.my_rate_limit_status2 = self.api2.rate_limit_status()

		# Account 3 (Con2Fol)
		# Authentication tokens for Twitter, authenticate, and check rate limit status
		self.auth3 = tweepy.OAuthHandler("FGlMRu1UBRD0K2a0BYBj1w", "ra1uQ6RXpupAzDotakWchgaCrKG5nSlmcST6J5XTl6c")
		self.auth3.set_access_token("335853926-wUuKeH3I4z46LlTVIZ06vyAt3NXSRuINQJj3sYuJ", "qW93vOnz7E7Tq1pMItsEt8bPt2oH04A2WukDP3FplU")
		self.api3 = tweepy.API(self.auth3)
		self.my_rate_limit_status3 = self.api3.rate_limit_status()

		# Active account index (rotated by the GetUser* methods to dodge rate limits);
		# expander_service selects the URL expander used by GetURLFromTweet
		# (0 = ExpandURL, 1 = LongURL, 2 = LongURLPlease).
		self.account_number = 1
		self.expander_service = 1

		# Lists that will be used for users and their items
		self.items = []
		self.users = []
		self.recommended_followers = []
		
		self.filtered_items = []
		self.filtered_users = []
		
		self.total_items = []
		self.total_filtered_items = []

		# Initialize Twitter (multiple accounts) and Backtype API calls
		# (per-account API-call counters, keyed by account index).
		self.api_calls = {0:0, 1:0, 2:0, 3:0}
		# NOTE(review): number_exceptions has no key for account 3, unlike
		# api_calls — confirm whether that is intentional (it is not used in
		# the code visible here).
		self.number_exceptions = {0:0, 1:0, 2:0}
				
		# This is the set of entities extracted from TweetUM for user TechCrunch based on that user's
		# 30 latest followees (18Aug2011)
		self.tweetum_entities = set(['Canada', 'non-seed', 'coach', 'versatile', 'Wilson', 'Leopard', 'Buffett', 'SEOmoz', 'Moon', 'manager', 'Babysitter', 'edu', 'Schmidt', 'Louisiana', 'Czar', 'Brown', '@rentcycle', 'Hyer', 'disorders', '@mcuban', 'Christiane', 'Dam', 'Javascript', 'Fremont', 'Snowball', 'Stem', 'PeopleBrowsr', 'Ranger', 'Lapeer', 'ARPU', 'Now', 'Day', '@NewMEAccel...let', 'smartphone', 'Whitman', 'GQ', 'www.facebook.com/event.php?eid=222193311155586', 'Ventures', 'Alaska', 'results', 'Sheryl', 'GE', 'cellular', 'GO', 'NYU', 'rapper', 'Jersey', 'cool', 'Spy', 'Parker', 'Buckaroo.com', 'Ruby', 'Osaka', 'Global', 'michael', 'solution', 'Marilyn', 'Goodnight', 'unrelated', 'Jon', 'VP', 'businesses', 'Paul', 'Seattle', 'George', 'Kevin', 'jmatthews@scenechat.com', 'hunter', 'LaTeX', 'Paleo', 'HTTP', 'Howard', 'phones', 'consistent', 'MySQL', 'Fast', 'chef', 'Ricoh', 'Here&apos;s', 'video', 'Rick', 'Acquia', 'Farmer', 'East', '3G', 'Red', 'www.cuyana.com', 'Arizona', 'Mongolab', 'Boyd', 'TWiT', 'Roll', 'cell', 'Batali', 'Amazon', 'HTML', 'Hamilton', 'Sao', 'above', 'Mich', 'relational', 'America', 'Sally', 'Bin', 'Classic', 'Ma', 'Libya', 'reporter', 'Charlie', 'Professor', 'NFL', 'water', 'ranger', 'Hoover', 'studio', 'Sunnyvale', 'Ohio', 'Times', 'Search', 'VMware', 'alto', 'search', 'Dean', 'Stream', 'Scoop', 'brilliant', 'Queen', 'Clara', 'SF', 'MT', 'Bikes', 'products', 'advertising', 'Palo', 'social', 'Jared', 'Roland', 'Technology', 'patents', 'Staff', 'Mountain', 'tu', '408-884-3889', 'Robert', 'Daniels', 'psychological', 'Nick', 'Louisville', 'O&apos;Brien', 'Safety', 'S.', '@wjarek', 'Eminem', 'tools', 'MSFT', 'Europe', 'Australia', 'sghopwood@scenechat.com', 'August', 'Comerica', 'Isaak', 'Beerworks', 'process', 'Royce', 'music', 'Novell', 'injury', 'TechCrunch', 'Fela', 'CTOs', 'Bronfman', 'PayPal', 'Intel', 'Mount', 'Fonts', 'Orlando', 'Chopra', 'Devices', 'site', 'team@mongolab.com', 'Morocco', 'antibiotics', 'tim@rentcycle.com', 'Time', 
'olive', '#internet', 'Paolo', 'Spotlight', 'Duke', 'bartender', 'Johnny', 'Tim', 'car', 'athlete', 'Library', 'Toronto', 'Entrepreneur', 'mp', 'Boston', 'Neko', 'Children', '@Tagged', 'spy', 'IBM', 'Lil', 'Media', 'Nepal', 'Hudson', 'Jerry', 'Virtual', 'Reality', 'stroke', 'PRELL', 'Scenechat!', 'Party', 'Hertzfeld', 'Glasgow', 'Smartphone', 'court', 'HIV', 'LA', 'Beach', 'travel', 'Wampanoag', 'Dell', 'Christmas', 'Arrington', 'Jordan', 'Jack', 'VentureBeat', 'Atom', '#travel', 'Kuper', 'Rental', 'Skyport', 'Robin', 'Soledad', 'designer', 'e-beam', 'Renault', 'Tea', 'www.doubletwist.com', 'Ryan', 'Garcia', 'Player', 'designed', 'Yale', 'Eleanor', 'law', 'View', 'Luidia', 'natural', 'Apple', 'www.facebook.com/pages/OLyfe/139761632754778', 'Mexico', 'jobs@mongolab.com', 'MySpace', 'Windows', 'BeCouply', 'Middle', 'Radio', 'Sean', 'www.Olyfe.com', 'Inigral', 'ethernet', 'BostonConsultingGroup', 'Monroe', '@couchsessions', 'Web-based', 'What', 'comms', 'Jay', 'september', 'Smith', '@cmusico', 'jpeg', 'Illinois', 'Jim', '@rexly', 'White', 'MIT', 'Sheen', 'developer', 'RT', 'I&apos;ve', '@navarrowwright', 'Full', 'SAN', 'Verizon', 'Berners-Lee', 'geo', 'OFFICIAL', 'translation', 'Xerox', 'html', '@kimazille', 'systems', 'Chernin', '2011', '@alexia', 'Spain', 'Buck', 'Mobile', 'docs', 'Kapor', 'Michael', 'Interpol', 'Bay', 'iPhone', 'Cam', 'Social', 'ninja', 'Closing', 'republican', 'day', 'Karina', 'Adobe', 'San', 'Mobility', 'India', 'curator', 'capability', 'courses', 'API', 'Skype', 'Twitpic', 'Riggins', 'Aneesh', 'Sanjay', 'MyLife', 'Tokbox', 'bump', 'Farhad', 'energy', 'Dude', 'Butcher', 'Geddes', 'ABC', 'P2P', 'Peru', 'programmer', '10v', 'Bubble', 'More', 'Engine', 'Ken', 'network', 'driving', 'CEO', 'Michigan', 'Haha', 'factory', 'Scoble', 'About.com', 'looking', 'Job', 'Strawpoll', 'China', '7', 'Zinch', 'Super', 'Class', 'Napoleon', 'Aniston', 'AlwaysOn', 'support@mongolab.com', 'farmer', 'Citibank', 'ipod', 'Let&apos;s', '@bryan_hunter', 'CNN', 'Vic', 
'www.scenecha', 'Congress', 'Madrid', 'Chan', 'days', 'pence', 'Vischer', 'IPv6', 'app', 'Cafe', 'www.scenechat.com', '@eBeam', 'Duh', 'Turkey', 'Cooper', 'Blanco', 'ISP', 'Rex', 'Black', 'co-founder', 'engineer', '@xrite', 'Jennifer', 'raksha@lmgpr.com', 'North', 'Botsman', 'service', 'Victoria&apos;s', 'Effect', 'Gallardo', 'paint', 'station', 'Woo', 'Nintendo', 'Rentcycle&apos;s', 'Iraq', 'Rebecca', 'store', 'Azam', 'Davos', 'Justin', 'hub', 'Gates', 'tool', '#imissnyc', 'BBC', 'Miracle', 'dentist', 'www.facebook.com/contour?sk=app_190208911039132', 'Girod', 'Oracle', 'PDF', 'XP', 'eBay', 'Haiti', 'Inc.', 'Gundotra', 'king', 'Dagger', 'Colorado', 'Creator', 'bubble', 'players', 'Herman', 'Resurfacing', 'silly', 'Gang', 'Sea', 'Forehand', 'Peter', 'Zink', 'Welcomes', 'and', 'Diego', 'Cosmonaut', '3M', 'arin@toodo.com', 'Chicago', 'MicroCell', 'DC', 'Secret', 'BLACK', 'gigabit', 'Interview', 'dinner', 'Valley', 'OpenTable', 'mountain', 'dale@uncollege.org', 'support@contour.com', 'Food', 'Kyoto', 'Price', 'Money', 'Da', 'www.getquik.com', 'Minister', 'Santa', 'online', 'Alpha', 'Gallery', 'Kuti', 'Cruze', 'TCP/IP', 'Linux', '@marcidale', 'Tho', 'Korea', '@sfoutsidelands', 'Kanye', 'alpha', 'Mohit', 'Abraham', 'Portland', 'singer', 'Google', 'Capital', 'Lewis', 'disease', 'Asta', 'tech', 'media/social', 'caching', 'Earth', 'Herring', '.com', 'renting', 'Web', 'Matt', 'Ian', 'Blount', 'Cisco', 'Paris', 'Barry', 'Karla', 'queen', 'Dolores', 'Craigslist', '@contour_cam', 'Brookins', 'Lennon', 'solutions', 'Marks', 'OnStar', 'networks', 'Businessweek', 'Kingdom', 'Crux', 'producer', 'Sacramento', 'writer', 'paranoia', 'in-car', 'wayne', 'York', 'Cottage', 'TechTarget', '@oLyfe', 'Prime', 'Grossman', 'sunglasses', '#solutions', '@upenzi', 'Willin&apos;', 'Louis', '@abenton', 'Magazine', 'Houston', 'www.mealison.com', 'Shops', 'chips', 'Stadium', 'GPS', '+1-206-792-5226', 'artist', 'True', 'Washington', 'Kong', '#collaboration', 'INTERNET', 'PHP', 'Twitter', 'view', 
'FDA', 'Arbor', 'EMC', 'King', 'w/', 'Dogg', 'Tesco', 'sez', 'France', 'analytics', 'Motorola', 'result', 'GBP', 'Grand', 'Chamillionaire', 'Android', 'John', 'Malawi', 'CRM', 'Linden', 'County', 'Good', 'Iowa', 'luv', 'mine', 'Tool', 'Park', 'Canyon', 'artificial', 'mayor', 'Genecure', 'Rexly', 'Harvard', 'Texas', 'Amazon.com', 'Sony', 'Technologies', 'Republican', 'Hopwood', 'therapies', 'conferencing', 'Hip', 'Dusty', 'Florida', 'Youtube', 'Edgar', 'Silicon', 'Subway', 'Annie', '@NewMeAccel', 'news', 'Hansen', '@orchestra.io', 'design', 'instant', '+@bryan_hunter', 'restaurant', 'drug', 'patti', 'Steve', 'Gillmor', 'XM', 'Computer', 'Contrave', 'arin@tood.com', 'auctions', 'Oscar', 'interactive', 'Whip', 'Primark', 'AOL', 'cancer', 'Zane', 'smith', 'YESYESYES', 'Song/artist', 'Amanpour', '@datingheadshots', 'Lounge', 'Los', 'Norway', 'Mt.', 'Ireland', 'Online', 'Dating', 'Willie', 'Meadows', '@stephendeberry', 'swimmer', 'Yahoo', 'Tacos', 'Brooklyn', 'California', 'B.I.G', 'Doctors', 'St.', 'dev@toodo.com', 'oDesk', 'engine', 'Georgia', 'Adams', 'Tokyo', 'Poland', 'Phil', 'gas', 'Philadelphia', 'Tackle', 'Kamangar', 'Dismissed', 'News', '@baptistejesse', 'Conway', 'Ann', 'Rachel', 'Everest', 'ventures', 'b&apos;shevat', 'voice', 'radio', 'Kottke', 'Building', 'Alfano', 'Italy', 'CATHERINE', 'Vannevar', 'Oakley', 'Krugman', 'hardware', 'player', 'Bush', 'Mini', 'properties', 'Prison', 'ready', 'Valencia', 'foreign', 'technology', 'Kareem', 'Georgetown', 'facial-recog', 'author', 'media', 'Bangkok', 'food', 'Leith', 'speaker', 'Handbag', 'party', 'CruiseControl.NET', 'Business&apos;s', 'oil', 'http', 'Institute', 'Cox', 'AERO', 'driver', 'Adam', 'Angeles', 'boston', 'portal', 'php', 'diabetes', 'Yard', 'Salar', 'Game', 'ROTFLMAO', 'Miller', 'NextUp', 'Mine', 'fever', 'Ppl', 'States', 'Club', 'www.getquik.com/pizza-my-heart-sunnyvale', 'sunnyvale', 'telephone', 'audio', 'browsing', 'BOY', 'HBO', 'United', 'ceo', 'Oslo', 'TechStars', 'executive', 'Access', 'Daniel', 
'virtual', 'Internet', 'injuries', 'Marvell', 'web', 'Jobs', 'Snoop', 'candidate', '#sfGiants', 'Lupe', 'photographer', 'Company', 'YouTube', '@jtwebman', '@BeCouply', 'Victoria', 'Zuckerberg', 'haz', 'Starbucks', 'FriendFeed', 'DVD', 'Fab', '5', 'easter', 'Bremmer', 'co-Founder', 'Alto', 'Sprint', 'Norton', 'Restaurant', 'Saratoga', 'James', 'Cells', 'Patagonia', 'Reginald', 'Snow', 'I/O', 'Billy', 'traffic', 'Nelson', 'Notorious', 'checkins', 'Talk', 'GetQuik', 'Bellingham', 'Yo-Yo', 'InformationWeek', '@contour_cam..check', 'Pie', 'server', 'communications', 'London', 'SQL', 'JUDGE', 'Stephan', 'Inspector', 'Golfer', 'AT&amp;T', 'Warren', '@hajjflemings', 'Perry', 'Java', 'Arcade', 'Marc', 'West', 'Garrett', 'HQ', 'Pius', 'Mark', 'streaming', 'Plugandplay', 'Doubletwist', 'Andy', 'Editor', 'palo', 'Try', 'New', 'sprint', 'Jonathan', 'Bubba', 'HD', 'OSX', 'Mike', '#collcons', 'Ace', 'for', 'Singapore', 'Fire', 'thanksgiving', '@Kate_Butler', 'Leo', 'Togo', 'Walden', 'Facebook', 'Bieber', 'Armenia', 'Chris', 'Barros', 'commentator', 'christmas', 'Syracuse', 'Rentcycle', 'Google+', 'Bill', 'David', 'Josaitis', 'Dakota', 'Sweden', 'www.thisweekin.com', 'Kindle', 'Microsoft', 'vaccine', 'contact@buckaroo.com', 'Becky', 'Madison', 'Taylor', 'HTC', 'Ron', 'industry', 'whiteboard', 'favorite', 'Heights', 'Buckaroo', 'WIRED', 'airline', 'Femi', 'Cameron', 'Japan', 'iPads', 'Captain', 'software', 'Lord', 'Hong', 'Eli', 'Memories', 'Majestic', 'AV', 'Bowl', 'Taj', 'entrepreneur', 'Aaron', 'Gaga', 'web-site', 'Alabama', 'networking', 'Ali', 'Shawn', 'people', 'JSON', 'Toshiba', 'Chelsea', 'Vegas', 'editor', 'Chile', 'President', 'forward', 'hysterical', 'HP', 'Obama', 'function', 'head', 'Mahal', 'N.', '#promotion', 'Girls', 'Visual', 'Karel', 'Detroit', 'Wildstrom', 'daylightburnsahole@gmail.com', 'DNS', 'Walker', 'technologies', 'Austin', 'Levchin', 'www.nyinternproject.com', 'Uzamere', '@TechTwNews', 'Zune', 'Awesome', 'Africa', 'Live', 'us', 'technology-enabled', 'Avi', 
'Messenger', 'Startups', 'election', 'Jose', 'USD', 'Main', 'Writer', 'CIO', 'Auerbach', 'LinkedIn', 'Abdul-Jabbar', '@arrington', 'sharing', 'rally', 'Ryu', 'Bernd', 'dealer', 'Rheingold', 'Francisco', 'TV', 'Max', '#entrepreneur', 'Utah', 'Andrew', 'Lab', 'reality', 'Mario', 'Jody', 'Mae', 'recognition', '#Disruptive', 'intelligence', 'LivingSocial', 'Discovery', 'Lady', 'Altucher', 'Las', 'Case', 'Great', 'Fiasco', 'Alzheimer&apos;s', 'Stanford', 'students', 'worker', 'Meg', 'stipe', 'Brazil', 'www.Buckaroo.com', 'Derby', 'Sandberg', '@_pius', 'Nations', 'presidential', 'South', 'Lakes', 'Major', 'COACH', '@CMCreativeMedia', 'Gupta', 'REBECCA', 'dog', 'Comcast', 'Hardware', 'Pariser', 'Scott', '#search', 'Jay-Z', 'representative'])
		
		# Kick off the entire pipeline at construction time (performs network
		# I/O, writes output files, runs ssh/cp commands and sends an email).
		self.Process(self.root_screen_name)
	
	def timeout(timeout_time, default):
	#Timeout function using decorator pattern
	#ref: http://pguides.net/python/timeout-a-function

		def timeout_function(f):
			def f2(*args):
				def timeout_handler(signum, frame):
					raise TimeoutException()

				old_handler = signal.signal(signal.SIGALRM, timeout_handler) 
				signal.alarm(timeout_time) # triger alarm in timeout_time seconds
				try: 
					retval = f(*args)
				except TimeoutException:
					print "TIMEOUT!"
					return None
				finally:
					signal.signal(signal.SIGALRM, old_handler) 
				signal.alarm(0)
				return retval
			return f2
		return timeout_function	

	@timeout(1800, None) # Timeout after 30 minutes	
	def GetUserID(self, screen_name):		
		# Keep track of the Twitter API Calls
		self.api_calls[self.account_number] = self.api_calls[self.account_number] + 1
		print "Api calls for account %s :" %self.account_number + " [%s] " %self.api_calls[self.account_number] + " (from GetUserTimeline)"
		if (self.api_calls[self.account_number] > 325):
			
			# Get the state of all API keys
			self.api_calls[0] = 350 - self.api0.rate_limit_status()["remaining_hits"]
			self.api_calls[1] = 350 - self.api1.rate_limit_status()["remaining_hits"]
			self.api_calls[2] = 350 - self.api2.rate_limit_status()["remaining_hits"]
			self.api_calls[3] = 350- self.api3.rate_limit_status()["remaining_hits"]

			# Ensure from the start that we are using the right API key according to the rate limits.
			while (self.api_calls[self.account_number] > 325):
				self.account_number = (self.account_number + 1) % 4 # Modulo, so that account numbers rotate
				print "Sleeping for 10 minutes while API refreshes ..."
				self.output_logs.write("Sleeping for 10 minutes while API refreshes... \n")
				time.sleep(10*60)

		user_id = None

		try:
			if (self.account_number == 0):
				user_id = self.api0.get_user(screen_name).id
			elif (self.account_number == 1):
				user_id = self.api1.get_user(screen_name).id
			elif (self.account_number == 2): 
				user_id = self.api2.get_user(screen_name).id
			elif (self.account_number == 3):
				user_id = self.api3.get_user(screen_name).id
		except:
			self.output_logs.write("Problem getting user id for user %s" %screen_name  + "Error: Not Authorized \n")
			print "Problem getting user id for user %s:" %screen_name + "Error: Not Authorized"
			pass

		return user_id

	@timeout(1800, None) # Timeout after 30 minutes	
	def GetUserTimeline(self, user_id):		
		# Keep track of the Twitter API Calls
		self.api_calls[self.account_number] = self.api_calls[self.account_number] + 1
		print "Api calls for account %s :" %self.account_number + " [%s] " %self.api_calls[self.account_number] + " (from GetUserTimeline)"
		if (self.api_calls[self.account_number] > 325):
			
			# Get the state of all API keys
			self.api_calls[0] = 350 - self.api0.rate_limit_status()["remaining_hits"]
			self.api_calls[1] = 350 - self.api1.rate_limit_status()["remaining_hits"]
			self.api_calls[2] = 350 - self.api2.rate_limit_status()["remaining_hits"]
			self.api_calls[3] = 350- self.api3.rate_limit_status()["remaining_hits"]

			# Ensure from the start that we are using the right API key according to the rate limits.
			while (self.api_calls[self.account_number] > 325):
				self.account_number = (self.account_number + 1) % 4 # Modulo, so that account numbers rotate
				print "Sleeping for 10 minutes while API refreshes ..."
				self.output_logs.write("Sleeping for 10 minutes while API refreshes... \n")
				time.sleep(10*60)

		user_timeline = None

		try:
			if (self.account_number == 0):
				user_timeline = self.api0.user_timeline(user_id,include_entities=True)
			elif (self.account_number == 1):
				user_timeline = self.api1.user_timeline(user_id,include_entities=True)
			elif (self.account_number == 2): 
				user_timeline = self.api2.user_timeline(user_id,include_entities=True)
			elif (self.account_number == 3):
				user_timeline = self.api3.user_timeline(user_id,include_entities=True)
		except:
			self.output_logs.write("Problem getting user timeline for user %s" %user_id  + "Error: Not Authorized \n")
			print "Problem getting user timeline for user %s" %user_id  + "Error: Not Authorized"
			pass

		return user_timeline		

	@timeout(1800, None) # Timeout after 30 minutes	
	def GetUserFriends(self, user_id):			
		# Keep track of the Twitter API Calls
		self.api_calls[self.account_number] = self.api_calls[self.account_number] + 1
		print "Api calls for account %s :" %self.account_number + " [%s] " %self.api_calls[self.account_number] + " (from GetUserTimeline)"
		if (self.api_calls[self.account_number] > 325):
			
			# Get the state of all API keys
			self.api_calls[0] = 350 - self.api0.rate_limit_status()["remaining_hits"]
			self.api_calls[1] = 350 - self.api1.rate_limit_status()["remaining_hits"]
			self.api_calls[2] = 350 - self.api2.rate_limit_status()["remaining_hits"]
			self.api_calls[3] = 350- self.api3.rate_limit_status()["remaining_hits"]

			# Ensure from the start that we are using the right API key according to the rate limits.
			while (self.api_calls[self.account_number] > 325):
				self.account_number = (self.account_number + 1) % 4 # Modulo, so that account numbers rotate
				print "Sleeping for 10 minutes while API refreshes ..."
				self.output_logs.write("Sleeping for 10 minutes while API refreshes... \n")
				time.sleep(10*60)

		friends_ids = None

		try:
			if (self.account_number == 0):
				friends_ids = self.api0.friends(user_id)
	 		elif (self.account_number == 1):
				friends_ids = self.api1.friends(user_id)
	 		elif (self.account_number == 2): 
				friends_ids = self.api2.friends(user_id)
	 		elif (self.account_number == 3):
				friends_ids = self.api3.friends(user_id)
		except:
			self.output_logs.write("Problem getting user timeline for user %s" %user_id  + "Error: Not Authorized \n")
			print "Problem getting user timeline for user %s" %user_id + "Error: Not Authorized"
			pass

		return friends_ids

	def Process(self, user_id):	
		
		start_time = time.strftime("%Y%m%d-%H""%M")	
		
		outputfilename = self.outputfile
		fqn_filename = outputfilename.split('.')
		
		# Output for normal (unfiltered runs)
		self.output = open(self.outputfile, 'w')
		self.output_info = open(fqn_filename[0] + ".info", 'w')
		self.output_logs = open(fqn_filename[0] + ".logs", 'w')
		
		# Output for TweetUM filtering
		self.output_filtered = open(fqn_filename[0] + "_filtered.out", 'w')
		self.output_filtered_info = open(fqn_filename[0] + "_filtered.info", 'w')
		self.output_filtered_logs = open(fqn_filename[0] + "_filtered.logs", 'w')
		
		self.output_logs.write("Rate Limit Account 0: %s" %self.api0.rate_limit_status() + "\n")
		self.output_logs.write("Rate Limit Account 0: %s" %self.api1.rate_limit_status() + "\n")
		self.output_logs.write("Rate Limit Account 0: %s" %self.api2.rate_limit_status() + "\n")
		self.output_logs.write("Rate Limit Account 0: %s" %self.api3.rate_limit_status() + "\n")
		
		print "Rate Limit Account 0: %s" %self.api0.rate_limit_status() + "\n"
		print "Rate Limit Account 0: %s" %self.api1.rate_limit_status() + "\n"
		print "Rate Limit Account 0: %s" %self.api2.rate_limit_status() + "\n"
		print "Rate Limit Account 0: %s" %self.api3.rate_limit_status() + "\n"	
		
		# Get the state of all API keys
		self.api_calls[0] = 350 - self.api0.rate_limit_status()["remaining_hits"]
		self.api_calls[1] = 350 - self.api1.rate_limit_status()["remaining_hits"]
		self.api_calls[2] = 350 - self.api2.rate_limit_status()["remaining_hits"]
		self.api_calls[3] = 350- self.api3.rate_limit_status()["remaining_hits"]

		# Ensure from the start that we are using the right API key according to the rate limits.
		while (self.api_calls[self.account_number] > 325):
			self.account_number = (self.account_number + 1) % 4
			print "Sleeping for 10 minutes while API refreshes ..."
			self.output_logs.write("Sleeping for 10 minutes while API refreshes... \n")
			time.sleep(10*60)
			
		user_id = self.GetUserID(self.root_screen_name)	
			
		# Add user_id to Users list, if not there already	
		if (self.users.count(user_id) == 0):
			self.users.append(user_id)

		# Add user_id to filtered users list, if not there already	
		if (self.filtered_users.count(user_id) == 0):
			self.filtered_users.append(user_id)

		# Get the last tweets published by user
		self.output_logs.write("Getting timeline for user %s \n" %user_id)
		print "Getting timeline for user %s" %user_id
		try:
			tweets = self.GetUserTimeline(user_id)
		except:
			self.output_logs.write("Nothing further is possible if not authorized to get user_timeline from user %s \n" %user_id)
			print "Nothing further is possible if not authorized to get user_timeline from user %s" %user_id
			raise

		if not(tweets is None):
			# Process the tweets of user
			self.ProcessTweets(user_id, tweets)
		else:
			print "Tweets empty!"
		
		# Process the tweets of friends of user
		self.ProcessRecommendedFriendsTweets(user_id)
		
		self.output_logs.write("Finished! \n")
		self.output_logs.write("Number of unique users: %s \n" %len(self.users))
		self.output_logs.write("Number of unique items: %s \n" %len(self.items))
		
		self.output_filtered_logs.write("Finished!\n")
		self.output_filtered_logs.write("Number of unique users: %s \n" %len(self.filtered_users))
		self.output_filtered_logs.write("Number of unique items: %s \n" %len(self.filtered_items))	
		
		# Close output files
		self.output.close()
		self.output_info.close()
		self.output_logs.close()
		self.output_filtered.close()
		self.output_filtered_info.close()
		self.output_filtered_logs.close()
				
		# Wait 5 minutes for all content to finish coming in
		time.sleep(5*60)
		
		# Create remote directory on phoenix.tudelft.net and copy the output file from this run
		# to that directory.
		remote_output_dir = "/Users/ocastaneda/Documents/TU_Delft/2010/IN5000/Tools/TweetPredict/output/phoenix.tudelft.net/output/output-%s" %start_time + "-%s" %self.root_screen_name	
		os.system("mkdir %s" %remote_output_dir)
		time.sleep(60)
		os.system("cp %s" %self.outputfile + " %s" %remote_output_dir)
		time.sleep(60)
		os.system("cp %s" %fqn_filename[0] + "_filtered.out %s" %remote_output_dir)
		time.sleep(60)
				
		#[0]/Users[1]/ocastaneda[2]/Documents[3]/TU_Delft[4]/2010[5]/IN5000[6]/Tools[7]/TweetPredict[8]/output[9]/output-[10]/filename[11]
		#[0]/[1]/[2]/[3]/[4]/[5]/[6]/[7]/[8]/[9]/[10]/[11]
		fqn_fqn_filename = outputfilename.split('/')
		pqn_filename = fqn_fqn_filename[11]
		pqn_pqn_filename = pqn_filename.split('.')
		
		# Run the UserBasedEvaluator java program and create the output locally
		os.system("ssh ocastaneda@145.94.40.203 'source /Users/ocastaneda/.bash_profile ; java UserBasedEvaluator %s" %remote_output_dir + "/%s" %fqn_fqn_filename[11] + "' > %s" %fqn_filename[0] + ".mahout")
		time.sleep(60)
		os.system("ssh ocastaneda@145.94.40.203 'source /Users/ocastaneda/.bash_profile ; java UserBasedEvaluator %s" %remote_output_dir + "/%s" %pqn_pqn_filename[0] + "_filtered.out' > %s" %fqn_filename[0] + "_filtered.mahout")
		time.sleep(60)
		
		precision = "-1"
		recall = "-1"
		precision_filtered = "-1"
		recall_filtered = "-1"
		
		output_evaluated = open(fqn_filename[0] + ".mahout", 'r')
		for line in output_evaluated:
			irstats = line.split(',')
			precision = irstats[0]
			recall = irstats[1]
		output_evaluated.close()
		
		output_filtered_evaluated = open(fqn_filename[0] + "_filtered.mahout", 'r')
		for line in output_filtered_evaluated:
			irstats = line.split(',')
			precision_filtered = irstats[0]
			recall_filtered = irstats[1]
		output_filtered_evaluated.close()

		# e.g. 8/21/2011 4:21:00 PM
		end_time = time.strftime("%d/%m/%Y %H:%M")	
		
		# Append summary stats to running file
		self.summary_output = open("/Users/ocastaneda/Documents/TU_Delft/2010/IN5000/Tools/TweetPredict/output/phoenix.tudelft.net/summary-RFN-TechCrunch.csv" , 'a')
		self.summary_output.write("%s" %len(self.total_items) + ",%s" %len(self.total_filtered_items) + ",%s" %len(self.users) + ",%s" %len(self.items) + ",%s" %len(self.filtered_users) + ",%s" %len(self.filtered_items) + ",%s" %precision + ",%s" %recall + ",%s" %precision_filtered + ",%s" %recall_filtered + ",%s" %end_time)
		self.summary_output.close()	
				
		print "Finished!"
		print "Number of unique users: %s" %len(self.users)
		print "Number of unique items: %s" %len(self.items)
	
		print "Number of unique filtered users: %s" %len(self.filtered_users)
		print "Number of unique filtered items: %s" %len(self.filtered_items)
		
		# Send an email with summary of this run
		
		message = Message(From="phoenix@tudelft.nl", To="interestingfollower@gmail.com")
		message.Subject = "Phoenix: RFN finished!"
		message.Body = "Here's the summary: \n\nTotal number of items: %s" %len(self.total_items) + "\nTotal number of filtered items: %s" %len(self.total_filtered_items) + "\n\nNumber of unique users: %s" %len(self.users) + "\nNumber of unique items: %s" %len(self.items) + "\n\nNumber of unique filtered users: %s" %len(self.filtered_users) + "\nNumber of unique filtered items: %s" %len(self.filtered_items) + "\n\nPrecision: %s" %precision + "\nRecall: %s" %recall + "\nPrecision filtered: %s" %precision_filtered + "\nRecal filtered: %s" %recall_filtered + "\nStart time: %s" %start_time + "\nEnd time: %s" %end_time

		sender = Mailer('smtp.tudelft.nl')
		sender.send(message)

	def RecommendedFollowers(self, user_id):	
		inputf = open(self.inputfile, 'r')

		# Read inputf and extract the same number of recommended_followers
		# as there are followers in my profile [len(my_friends_ids)]
		# while (number_of_friends_processed < len(my_friends_ids)):
		
		friend_circle = 0
		
		# Extract recommended followees from input file, to do so it is
		# enough to read only the rows (first column [0]) from the input file.
		for line in inputf:
			if not(friend_circle > 59):				
				pattern = re.compile(r'RecommendedItem\[item\:(\d+)\, value\:1\.0]', re.UNICODE)
				recommended_followee = pattern.findall(line)
				self.recommended_followers.append(recommended_followee)
				friend_circle = friend_circle + 1					
		inputf.close()

		return self.recommended_followers

	def ProcessTweets(self, user_id, tweets):
		self.output_logs.write("Entering ProcessTweets... \n")			
		print "Entering ProcessTweets... \n"				
			
		tweets_with_url = 0	
			
		# For each of the (5) tweets in my user_timeline get URLs			
		for tweet in tweets:
			
			if not(len(tweet.entities['urls']) == 0):
			
				# Add it to the unique URLs list
				if not(len(tweet.entities['urls'][0]['url']) == 0) and not(tweets_with_url > 4):
				
					# Extract URL from Tweet
					url = self.GetURLFromTweet(tweet)
				
					self.AddToItemList(user_id, tweet.user.id, url)
					tweets_with_url = tweets_with_url + 1
					filtered_tweet = self.FilterContent(tweet)
					if not(filtered_tweet is None):
						self.AddToFilteredItemList(user_id, tweet.user.id, url)	
			
	def ProcessRecommendedFriendsTweets(self, user_id):
		self.output_logs.write("Entering ProcessrecommendedFriendsTweets... \n")
		print "Entering ProcessrecommendedFriendsTweets... \n"
		
		# Get recommended Followers
		#try:
		recommended_followers = self.RecommendedFollowers(user_id)
		#except:
		#	pass
			
		if not(recommended_followers is None):	

			friend_circle = 0
		
			# For each friend, get their tweets and then
			# extract URLs and add them uniquely to list
			#pdb.set_trace()
			
			for followee in recommended_followers:
						
				# Check that we received a timeline and that we're under the 25 
				# friends limit		
				if not(friend_circle > 29):
					
					recommended_followee = followee[0]
			
					try:	
						followee_timeline = self.GetUserTimeline(recommended_followee)
					except:
						continue
			
					if not(followee_timeline is None):
						
						friend_circle = friend_circle + 1					
						
						tweets_with_url = 0	
			
						for tweet in followee_timeline:
							
							if not(len(tweet.entities['urls']) == 0):
							
								# Add it to the unique URLs list
								if not(len(tweet.entities['urls'][0]['url']) == 0) and not(tweets_with_url > 4):
								
									# Extract URL from Tweet
									url = self.GetURLFromTweet(tweet)
								
									self.AddToItemList(user_id, tweet.user.id, url)
									tweets_with_url = tweets_with_url + 1
									filtered_tweet = self.FilterContent(tweet)
									if not(filtered_tweet is None):
										self.AddToFilteredItemList(user_id, tweet.user.id, url)	
					
					# Process the tweets of friends of friends of user
					self.ProcessFriendsOfrecommendedFriendsTweets(recommended_followee)			
	
	def ProcessFriendsOfRecommendedFriendsTweets(self, user_id):		
		"""Second hop: record URL-bearing tweets from up to 30 friends of a
		recommended followee."""
		# Get the friends ids of a user
		try:
			friends_ids = self.GetUserFriends(user_id)
		except Exception:
			# Fixed: the original did `pass`, leaving friends_ids unbound so
			# the "is None" check below raised NameError on failure.
			friends_ids = None
		
		if not(friends_ids is None):
			
			friend_circle = 0
			 	
			# For each friend, get their tweets and then
			# extract URLs and add them uniquely to list
			for followee in friends_ids:	
				
				# Stay under the 30-friends limit.
				if not(friend_circle > 29):
				
					try:
						followee_timeline = self.GetUserTimeline(followee.id)
						friend_circle = friend_circle + 1					
					except Exception:
						# Skip friends whose timeline cannot be fetched.
						continue
				
					if not(followee_timeline is None):
					
						tweets_with_url = 0	
					
						# Record up to five URL-bearing tweets from this timeline.
						for tweet in followee_timeline:	
							
							if not(len(tweet.entities['urls']) == 0):
							
								# Add it to the unique URLs list
								if not(len(tweet.entities['urls'][0]['url']) == 0) and not(tweets_with_url > 4):
								
									# Extract URL from Tweet
									url = self.GetURLFromTweet(tweet)
								
									self.AddToItemList(user_id, tweet.user.id, url)
									tweets_with_url = tweets_with_url + 1				
									filtered_tweet = self.FilterContent(tweet)
									if not(filtered_tweet is None):
										self.AddToFilteredItemList(user_id, tweet.user.id, url)	

	def GetURLFromTweet(self, tweet):
		"""Return the fully-expanded URL of the first URL entity in `tweet`.

		Prefers the entity's `expanded_url`, falling back to `url`.  The URL
		is run through the currently selected expander service up to three
		times (a failed call rotates self.expander_service, so retries can
		hit a different service), then through a plain HTTP redirect follow.
		A result shorter than 30 characters is assumed to still be shortened
		and goes through one more expansion round.  Returns None when the
		tweet carries no usable URL entity.
		"""
		if len(tweet.entities['urls']) == 0:
			return None

		entity = tweet.entities['urls'][0]
		if len(entity['url']) == 0:
			# Entity present but empty URL string: nothing to expand.
			# (The original fell off the end of the function here, which
			# returned None implicitly; made explicit.)
			return None

		if entity['expanded_url'] is not None:
			url_posted = entity['expanded_url']
		else:
			url_posted = entity['url']

		# First expansion round: one unconditional attempt, then up to two
		# retries while the service keeps handing the URL back unchanged.
		real_url = self._DispatchExpander(url_posted)
		for _ in range(2):
			if real_url == url_posted:
				real_url = self._DispatchExpander(url_posted)

		# Last resort: follow HTTP redirects directly.
		if real_url == url_posted:
			try:
				real_url = urllib2.urlopen(url_posted).url
			except:
				real_url = url_posted

		expanded_url = real_url

		# Still, the 'expanded' real_url might just be another layer of the
		# onion, so go through the whole process, but only once more.
		# NOTE(review): like the original, this round re-expands url_posted
		# rather than real_url — confirm that is intended.
		if len(real_url) < 30:
			real_url = self._DispatchExpander(url_posted)
			for _ in range(2):
				if real_url == expanded_url:
					real_url = self._DispatchExpander(url_posted)

			if real_url == expanded_url:
				try:
					real_url = urllib2.urlopen(url_posted).url
				except:
					real_url = expanded_url

		return real_url

	def _DispatchExpander(self, url_posted):
		# Route to the expander chosen by self.expander_service.  An
		# unrecognised service index leaves the URL untouched, matching the
		# old if/elif chains (which simply skipped the call in that case).
		if self.expander_service == 0:
			return self.ExpandURL(url_posted)
		elif self.expander_service == 1:
			return self.LongURL(url_posted)
		elif self.expander_service == 2:
			return self.LongURLPlease(url_posted)
		return url_posted

	def ExpandURL(self, url_posted):
		"""Expand a shortened URL via expandurl.appspot.com.

		Returns the expanded URL, or `url_posted` unchanged on any failure.
		A failure also rotates self.expander_service to the next service and
		resets that service's exception counter.
		"""
		def _note(kind):
			# Best-effort logging, then rotate expander services.  The
			# counter is always > 0 right after the increment, so a single
			# failure triggers the rotation (preserved from the original).
			try:
				self.output_logs.write("ExpandURL %s with URL: %s\n" % (kind, url_posted))
			except:
				self.output_logs.write("ExpandURL %s with URL" % kind)
			self.number_exceptions[self.expander_service] = self.number_exceptions[self.expander_service] + 1
			if self.number_exceptions[self.expander_service] > 0:
				self.expander_service = (self.expander_service + 1) % 3
				self.number_exceptions[self.expander_service] = 0

		try:
			request = urllib2.Request("http://expandurl.appspot.com/expand?url=%s" % url_posted)
			opener = urllib2.build_opener()
			json_url = simplejson.loads(opener.open(request).read())

			# Check if the URL has multiple redirects and choose the last one
			if json_url["status"] == "OK":
				# BUG FIX: the old code returned url_key[2] for more than two
				# hops, which is only the last hop when there are exactly
				# three; [-1] is the last hop for any count.
				return json_url["urls"][-1]

			# Check if despite being 'InvalidURL' maybe there is still
			# something useful
			elif json_url["status"] == "InvalidURL":
				url_key = json_url["urls"]
				if len(url_key) >= 2:
					# NOTE(review): the original also picked url_key[1] when
					# len > 2 — kept as-is, but confirm that is intended.
					real_url = url_key[1]
				else:
					real_url = json_url["start_url"]
				return real_url

			# If status is not OK nor InvalidURL then log and return the
			# posted URL unchanged.
			else:
				_note("failure")
				return url_posted
		except:
			_note("exception")
			return url_posted

	def LongURL(self, url_posted):
		"""Expand a shortened URL via api.longurl.org (v2, JSON format).

		Returns the expanded URL, or `url_posted` unchanged on any failure.
		A failure also rotates self.expander_service to the next service and
		resets that service's exception counter.
		"""
		def _note(kind):
			# Best-effort logging, then rotate expander services.
			# BUG FIX: these messages used to say "LongURLPlease", which
			# attributed LongURL problems to the wrong service in the logs.
			try:
				self.output_logs.write("LongURL %s with URL: %s\n" % (kind, url_posted))
			except:
				self.output_logs.write("LongURL %s with URL" % kind)
			self.number_exceptions[self.expander_service] = self.number_exceptions[self.expander_service] + 1
			if self.number_exceptions[self.expander_service] > 0:
				self.expander_service = (self.expander_service + 1) % 3
				self.number_exceptions[self.expander_service] = 0

		try:
			request = urllib2.Request("http://api.longurl.org/v2/expand?url=%s&format=json" % url_posted)
			opener = urllib2.build_opener()
			json_url = simplejson.loads(opener.open(request).read())

			if json_url["long-url"] is not None:
				return json_url["long-url"]
			else:
				_note("failure")
				return url_posted
		except:
			_note("exception")
			return url_posted

	def LongURLPlease(self, url_posted):
		"""Expand a shortened URL via longurlplease.com (API v1.1).

		Returns the expanded URL, or `url_posted` unchanged on any failure.
		A failure also rotates self.expander_service to the next service and
		resets that service's exception counter.
		"""
		def _note(kind):
			# Best-effort logging, then rotate expander services.
			# BUG FIX: the old failure-path fallback re-ran the very same
			# %-formatting write that had just raised, so the fallback could
			# itself raise; the fallback now uses the unformatted message
			# like the sibling expander methods.
			try:
				self.output_logs.write("LongURLPlease %s with URL: %s\n" % (kind, url_posted))
			except:
				self.output_logs.write("LongURLPlease %s with URL" % kind)
			self.number_exceptions[self.expander_service] = self.number_exceptions[self.expander_service] + 1
			if self.number_exceptions[self.expander_service] > 0:
				self.expander_service = (self.expander_service + 1) % 3
				self.number_exceptions[self.expander_service] = 0

		try:
			request = urllib2.Request("http://www.longurlplease.com/api/v1.1?q=%s" % url_posted)
			opener = urllib2.build_opener()
			json_url = simplejson.loads(opener.open(request).read())

			# The service maps each queried URL to its expansion.
			if json_url[url_posted] is not None:
				return json_url[url_posted]
			else:
				_note("failure")
				return url_posted
		except:
			_note("exception")
			return url_posted


	def AddToItemList(self, parent_user_id, child_user_id, url):

		try:
			self.output_logs.write("From " + "%s" %child_user_id+"'s profile \n")
			self.output_logs.write("URL: " "%s" %url + "\n")

			print "From " + "%s" %child_user_id+"'s profile"
			print "URL: " "%s" %url + "\n"
		except:
			pass

		# Add user_id to Users list, if not there already	
		if (self.users.count(child_user_id) == 0):
			self.users.append(child_user_id)				
		# Put URL in items list if it's not there already
		if (self.items.count(url) == 0):
			self.items.append(url)

		# Add URL to total items	
		self.total_items.append(url)

		if not(url is None):
			#pdb.set_trace()
			try:
				self.output.write("%s" %child_user_id.strip + ",%s" % self.items.index(url) + "\n")
				self.output.write("%s" %parent_user_id.strip + ",%s" % self.items.index(url) + "\n")										
				self.output_info.write("%s" %child_user_id.strip + ", %s" % self.items.index(url) + "= " + "%s" %url + "\n")
				self.output_info.write("%s" %parent_user_id.strip + ", %s" % self.items.index(url) + "= " + "%s" %url + "\n")
			except:
				pass

	def AddToFilteredItemList(self, parent_user_id, child_user_id, url):

		try:
			self.output_logs.write("[FILTERED] From " + "%s" %child_user_id+"'s profile \n")
			self.output_logs.write("[FILTERED] URL: " "%s" %url + "\n")

			print "[FILTERED] From " + "%s" %child_user_id+"'s profile"
			print "[FILTERED] URL: " "%s" %url + "\n"
		except:
			pass

		# Add user_id to Users list, if not there already	
		if (self.filtered_users.count(child_user_id) == 0):
			self.filtered_users.append(child_user_id)				
		# Put URL in items list if it's not there already
		if (self.filtered_items.count(url) == 0):
			self.filtered_items.append(url)
		
		# Add URL to total filtered items
		self.total_filtered_items.append(url)
		
		if not(url is None):
			try:
				self.output_filtered.write("%s" %child_user_id.strip + ",%s" % self.filtered_items.index(url) + "\n")	
				self.output_filtered.write("%s" %parent_user_id.strip + ",%s" % self.filtered_items.index(url) + "\n")									
				self.output_filtered_info.write("%s" %child_user_id.strip + ", %s" % self.filtered_items.index(url) + "= " + "%s" %url + "\n")								
				self.output_filtered_info.write("%s" %parent_user_id.strip + ", %s" % self.filtered_items.index(url) + "= " + "%s" %url + "\n")
			except:
				pass

	def FilterContent(self, tweet):

		words_in_tweet = tweet.text.split(' ')

		for word in words_in_tweet:
			if word in self.tweetum_entities:
				try:
					self.output_logs.write("TweetUM Match! Matched with word: %s" %word + "\n")
					print "TweetUM Match! Matched with word: %s" %word + "\n"
				except:
					pass
				return tweet

####################################################################################

def RunMain():
	usage = "(First version) usage: %prog <inputfilepath> <outputfilepath> <screen_name>"
	parser = OptionParser(usage)
	(options, args) = parser.parse_args()

	if( len(args) < 3):
		print "Invalid number of arguments. Use RFN.py --help to see the details."
	else:
		inputfilepath = args[0]
		outputfilepath = args[1]
		screen_name = args[2]

		try:
			print "Updating the recommended followers user-item matrix"

			RecommendedFollowers(inputfilepath, outputfilepath, screen_name)
		except:
			pass
			raise

# Entry point: run only when executed as a script, not on import.
if __name__ == "__main__":
	RunMain()

	
