"""
Upload JSON metadata datasets to data.gov.be.
Requires Selenium and PhantomJS.
"""

__author__ = "Bart Hanssens <bart.hanssens@fedict.be>"

import argparse, os, time, logging
import json
from splinter import Browser
from selenium.webdriver.support.select import Select

# Log everything to a file; the selenium wire protocol logger is extremely
# chatty at DEBUG, so raise its threshold to WARNING.
logging.basicConfig(filename='upload-datagov.log', level=logging.DEBUG)
selenium_log = logging.getLogger('selenium.webdriver.remote.remote_connection')
selenium_log.setLevel(logging.WARNING)

# Single shared headless PhantomJS browser used by every function below.
browser = Browser('phantomjs')

def setoptions(element, options):
	"""Select every non-empty label from *options* in the <select> element
	identified by the DOM id *element*."""
	dropdown = Select(browser.driver.find_element_by_id(element))
	for label in options:
		if not label:
			continue
		dropdown.select_by_visible_text(label)


def dataset(url, lang, infile):
	"""Create a dataset node on data.gov.be from a JSON metadata file.

	url -- base URL of the data.gov.be site
	lang -- content language code (nl, en, fr, de)
	infile -- path to the JSON file describing one dataset
	"""
	logging.info('Parsing %s', infile)

	with open(infile, 'r') as f:
		data = json.load(f)

	# browser.visit(...) fails with 403 - Forbidden, so use driver directly
	browser.driver.get(url + '/' + lang + '/node/add/dataset?language=' + lang)

	# Required metadata
	browser.fill('title', data['title'])

	# Switch the body field out of WYSIWYG mode before filling plain text
	browser.find_by_id('wysiwyg-toggle-edit-body').click()
	browser.fill('body', data['body'])

	setoptions('edit-taxonomy-9', data['tags'])
	setoptions('edit-taxonomy-6', data['media'])
	setoptions('edit-taxonomy-3', data['org'])

	# Add links to details / context of the datasets
	for i, c in enumerate(data['contexts']):
		browser.fill('field_details' + lang + '[' + str(i) + '][url]', c)

	if 'urls' in data:
		# Add more URL fields if needed
		# NOTE(review): the '- 2' assumes the form already shows two URL
		# inputs; if it starts with only one this is off by one -- confirm
		for _ in range(len(data['urls']) - 2):
			browser.find_by_id('edit-field-links-' + lang +
							'-field-links-' + lang + '-add-more').click()
			logging.info('Adding URL field')
			time.sleep(3)  # give the AJAX callback time to insert the field
		# Now add all the URLs
		for i, u in enumerate(data['urls']):
			browser.fill('field_links_' + lang + '[' + str(i) + '][url]', u)

	# Optional data.gov.be metadata
	if 'fmts' in data:
		setoptions('edit-taxonomy-5', data['fmts'])

	if 'geo' in data:
		setoptions('edit-taxonomy-10', data['geo'])

	if 'license' in data:
		setoptions('edit-taxonomy-8', data['license'])

	if 'freq' in data:
		setoptions('edit-taxonomy-4', data['freq'])

	# Optional from/till dates of the period covered by the dataset
	if 'time' in data:
		if data['time'][0]:
			browser.fill('field_timefrom[0][value][date]', data['time'][0])
		if data['time'][1]:
			browser.fill('field_timefrom[0][value2][date]', data['time'][1])

	if 'source' in data:
		browser.fill('field_source[0][value]', data['source'])

	browser.find_by_id('edit-submit').click()


def login(url, user, pwd):
	"""Sign in on data.gov.be with a user capable of creating data sets."""
	browser.driver.get(url + '/user')
	for field, value in (('name', user), ('pass', pwd)):
		browser.fill(field, value)
	browser.find_by_id('edit-submit').click()

def logout(url):
	"""End the current data.gov.be session."""
	logout_url = '%s/logout' % url
	browser.driver.get(logout_url)


def main():
	"""Parse CLI arguments, log in, upload every file in the input directory.

	On any failure a stack trace and the current URL are logged and a
	screenshot is saved to help debugging; the browser is always shut down.
	"""
	parser = argparse.ArgumentParser(description='Upload CKAN-ish datasets to data.gov.be')
	parser.add_argument('--url', help='Data.gov.be url', required=True)
	parser.add_argument('--user', help='Username', required=True)
	parser.add_argument('--pwd', help='Password', required=True)
	parser.add_argument('--lang', help='Language (nl, en, fr, de)', required=True)
	parser.add_argument('--indir', help='Input directory containing JSON files', required=True)

	args = parser.parse_args()

	logging.info('-- START --')

	try:
		login(args.url, args.user, args.pwd)
		time.sleep(4)  # wait for the login redirect to complete

		for f in sorted(os.listdir(args.indir)):
			dataset(args.url, args.lang, os.path.join(args.indir, f))
			time.sleep(4)  # throttle between uploads

		logout(args.url)
	except Exception:
		# Top-level boundary: keep the stack trace and a screenshot of the
		# page where the failure happened.
		logging.exception('Fatal exception %s', browser.url)
		browser.driver.save_screenshot('ckan-datagov.png')
	finally:
		# Always shut the browser down, even if the handler itself failed.
		logging.info('-- END --')
		browser.quit()


# Run only when executed as a script, not when imported as a module.
if __name__ == "__main__":
	main()
