from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.common.keys import Keys
from selenium import webdriver
import hashlib
import urllib
import urllib2
import locale
import codecs
import time
import sys
import re
import csv


def formatURL(name, outf, first):
	'''
	Write one URL record to `outf` as a SQL-style value tuple
	('<sha256>','<link>','<domain>','B1','Benign').

	Records after the first are prefixed with ',\n' so the output forms a
	comma-separated VALUES list. Returns the updated `first` flag (always
	False once a record has been written).
	'''
	link = name
	# NOTE(review): domain is just a copy of the full link here, not an
	# extracted hostname -- confirm downstream consumers expect that.
	domain = link
	label = 'B1'
	real_type = 'Benign'
	# Fix: sha256 requires bytes; encode so unicode/str input (and Python 3)
	# does not raise. Original passed the raw string.
	hex_hash = hashlib.sha256(link.encode('utf-8')).hexdigest()
	record = "('%s','%s','%s','%s','%s')" % (hex_hash, link, domain, label, real_type)
	if first:
		outf.write(record)
	else:
		outf.write(',\n' + record)
	return False



def formatSites(infile = '../../data/TempData/URLs/top-1m.csv', outfile = '../../data/TempData/URLs/benignurl-2013-11-14'):
	'''
	Split a CSV of URLs (URL taken from the last column of each row) into
	chunks of 1000 records, writing chunk k to '<outfile>-<k>' as a
	comma-separated list of SQL value tuples via formatURL.
	'''
	outf = None
	first = True
	try:
		with open(infile, 'r') as inf:
			reader = csv.reader(inf)
			for counter, row in enumerate(reader):
				# Start a new part file every 1000 rows.
				if counter % 1000 == 0:
					if outf is not None:
						outf.close()
					# Fix: use floor division so the part suffix stays an
					# int on Python 3 (original '/' would yield e.g. '-1.0').
					outf = open(outfile + '-' + str(counter // 1000), 'w')
					first = True
				first = formatURL(row[-1], outf, first)
	finally:
		# Fix: the original leaked the last (or only) part file.
		if outf is not None:
			outf.close()



def getAlexaTop():
	'''
	Get the benign list of websites. Including top N of global/US websites.

	Scrapes the paginated Alexa "topsites" listings (global, then US) with a
	Selenium-driven Firefox session and writes every site name through
	formatURL into two output files. N defaults to 500 and may be overridden
	via sys.argv[1].
	'''
	globaltop = '../../data/TempData/URLs/alexaglobaltop'
	ustop = '../../data/TempData/URLs/alexaustop'
	total = 500
	# Optional command-line override of how many sites to collect.
	if len(sys.argv) == 2:
		total = int(sys.argv[1])

	# NOTE(review): the path variables are rebound to open file objects just
	# below, so the original path strings become unreachable past this point.
	globaltop = globaltop + str(total)
	ustop = ustop + str(total)
	globaltop = codecs.open(globaltop, encoding = 'utf-8', mode = 'w')
	ustop = codecs.open(ustop, encoding = 'utf-8', mode = 'w')

	# use selenium to visit the page
	locale.setlocale( locale.LC_ALL, 'en_US.UTF-8' ) 
	profile = webdriver.FirefoxProfile()
	browser = webdriver.Firefox(profile) # Get local session of firefox
	browser.delete_all_cookies()

	urlp1 = "http://www.alexa.com/"
	urlp2 = "topsites/global;"
	perpage = 25  # one Alexa listing page shows 25 sites
	# NOTE(review): relies on Python 2 integer division; on Python 3 this is
	# a float and range(0, upperbound) below would raise TypeError -- confirm
	# the target interpreter.
	upperbound = total/perpage
	pages = []
	first = True
	for i in range(0, upperbound):
		pages.append(str(i))
	# Walk the paginated global top-sites listing.
	for urlp3 in pages:
		url = urlp1 + urlp2 + urlp3
		browser.get(url)
		site_listing = browser.find_elements_by_class_name("site-listing")
		for website in site_listing:
			# The site name is the anchor text inside each listing's <h2>.
			name = website.find_element_by_tag_name("h2").find_element_by_tag_name("a").text
			# link = website.find_element_by_class_name("topsites-label").text
			first = formatURL(name, globaltop, first)


	# Same walk again for the US country-specific listing.
	urlp2 = "topsites/countries;"
	pages = []
	first = True
	for i in range(0, upperbound):
		pages.append(str(i))
	urlp4 = "/US"
	for urlp3 in pages:
		url = urlp1 + urlp2 + urlp3 + urlp4
		browser.get(url)
		site_listing = browser.find_elements_by_class_name("site-listing")
		for website in site_listing:
			name = website.find_element_by_tag_name("h2").find_element_by_tag_name("a").text
			# link = website.find_element_by_class_name("topsites-label").text
			first = formatURL(name, ustop, first)

	browser.close()
	globaltop.close()
	ustop.close()


if __name__=="__main__":
	# Entry point. Currently configured to reformat the damballa benign URL
	# list; the Alexa scrape and default formatSites runs are kept below,
	# commented out, as alternative pipelines.
	# getAlexaTop()
	# formatSites()
	formatSites(infile = '../../data/TempData/URLs/damballa-benign', outfile = '../../data/TempData/URLs/yacin-benign')


