# Given a k-means set of centroids, cluster rich user-year vectors into the model clusters by closest centroid

import os
import sys
import multiprocessing
import datetime
import traceback

import random

def main():
	"""Fan file-translation tasks out to a worker pool.

	Usage: script <inputDir> <outputDir>
	Every regular file in inputDir is translated to a same-named file in
	outputDir by processFile().
	"""

	# The number of worker processes the worker pool will use to process the work queue.
	# Recommend setting this to half the number of available cores to avoid hogging resources.
	# That would be 24 cores on platinum.
	# Once you get above a certain level of CPU usage, I/O throughput will bottleneck anyway.
	NUM_WORKERS = 24

	# The number of translation tasks given to each process at a time.
	# Recommend this setting to be 1 for maximum throughput.
	# If I/O throughput ever becomes an issue, increasing this value will make non-CPU bound tasks run more efficiently.
	TASK_PARTITION_SIZE = 1

	inputDir = sys.argv[1]
	outputDir = sys.argv[2]

	# Build (input, output) path pairs for every regular file in the input dir.
	# os.path.join is used instead of '/' concatenation so paths stay portable
	# and don't end up with doubled separators.
	filePathArgs = []
	for inputFileName in os.listdir(inputDir):
		inputFilePath = os.path.join(inputDir, inputFileName)
		outputFilePath = os.path.join(outputDir, inputFileName)
		if os.path.isfile(inputFilePath):
			filePathArgs.append((inputFilePath, outputFilePath))

	workerPool = multiprocessing.Pool(NUM_WORKERS)
	# Fire-and-forget: the AsyncResult is deliberately ignored; worker errors
	# are reported by processFile() itself via its traceback-to-stdout handler.
	workerPool.map_async(processFile, filePathArgs, TASK_PARTITION_SIZE)
	workerPool.close()
	workerPool.join()
			
def processFile(filePathArgs):
	"""Worker-pool entry point: translate one input file into one output file.

	filePathArgs is an (inputFilePath, outputFilePath) tuple (Pool.map passes a
	single argument). Never raises: any error is printed to stdout instead.
	"""
	try:
		inputFilePath, outputFilePath = filePathArgs

		print(timestamp() + " Starting processing for " + inputFilePath + " to " + outputFilePath)

		# 'with' guarantees both handles are closed even if translate() raises;
		# the original explicit close() calls were skipped on error, leaking
		# descriptors in long-lived worker processes.
		# NOTE: output is opened in append mode ('a+'), so re-running the job
		# accumulates rows rather than overwriting — preserved deliberately.
		with open(inputFilePath, "r") as inputFileHandle:
			with open(outputFilePath, 'a+') as outputFileHandle:
				translate(inputFileHandle, outputFileHandle)

		print(timestamp() + " Finished processing for " + inputFilePath + " in " + outputFilePath)

	except Exception:
		# There is a problem where if a child process encounters an error or exception, the traceback gets written
		# to stderr but not flushed, so you never see it. This code fixes that problem by pushing it to stdout, where it is
		# piped back to the main process through the pool.
		# 'except Exception' (not bare 'except:') so KeyboardInterrupt/SystemExit
		# still propagate and the pool can shut down cleanly.
		traceback.print_exc(limit=3, file=sys.stdout)
		sys.stdout.flush()

def translate(inputFileHandle, outputFileHandle):
	"""Stream the input file, grouping consecutive rows by username, and emit
	each user's transition rows via processUser().

	Assumes all rows for a given user are contiguous in the input file
	(a change in username marks the end of the previous user's run).
	"""
	userData = []

	for line in inputFileHandle:
		fields = parseLine(line)

		# Username changed: flush the completed user's rows and start a new run.
		if userData and fields['username'] != userData[0]['username']:
			processUser(userData, outputFileHandle)
			userData = []

		userData.append(fields)

	# Flush the very last user, who never triggers the username-change check.
	# The guard avoids an IndexError inside processUser() when the input file
	# is empty (userData would still be []).
	if userData:
		processUser(userData, outputFileHandle)

def processUser(userdata, outputFileHandle):
	"""Write one tab-separated transition row per consecutive pair of a user's
	year records, plus a synthetic transition to cluster 0 ("gone") if the
	user's last observed year predates CURRENT_YEAR.

	userdata: list of field dicts from parseLine(), all for the same user,
	in year order. Writes rows of the form
	username<TAB>cohort<TAB>lifespan<TAB>offset<TAB>current<TAB>next.
	"""
	# Guard: an empty record list previously crashed on userdata[0].
	if not userdata:
		return

	transitions = []

	CURRENT_YEAR = 2011
	last = None
	# Identity fields are constant across one user's rows; take them once.
	username = userdata[0]['username']
	cohort = userdata[0]['cohort']
	lifespan = userdata[0]['lifespan']

	for line in userdata:

		if last:
			transitions.append({'username': username, 'cohort': cohort, 'lifespan': lifespan, 'offset': last['offset'], 'current': last['cluster'], 'next': line['cluster']})

		last = line

	# User disappeared before the current year: record a final transition into
	# cluster 0 one offset past their last observation.
	if last['year'] < CURRENT_YEAR:
		transitions.append({'username': username, 'cohort': cohort, 'lifespan': lifespan, 'offset': last['offset'] + 1, 'current': last['cluster'], 'next': 0})

	for line in transitions:
		out = [line['username'], str(line['cohort']), str(line['lifespan']), str(line['offset']), str(line['current']), str(line['next'])]
		outputFileHandle.write('\t'.join(out) + '\n')

def parseLine(line):
	"""Parse one tab-separated input record into a field dict.

	Expected column order: username, cohort, lifespan, offset, year, cluster.
	All columns after username are parsed as integers.
	"""
	tokens = line.strip().split('\t')

	return {
		'username': tokens[0],
		'cohort': int(tokens[1]),
		'lifespan': int(tokens[2]),
		'offset': int(tokens[3]),
		'year': int(tokens[4]),
		'cluster': int(tokens[5]),
	}


def timestamp():
	"""Return the current local time formatted as YYYY-MM-DDTHH:MM:SS."""
	now = datetime.datetime.now()
	return now.strftime('%Y-%m-%dT%H:%M:%S')

# Standard entry-point guard. Required with multiprocessing: child processes
# re-import this module (always on Windows 'spawn'), and without the guard
# each child would recursively re-run main() and spawn its own pool.
if __name__ == '__main__':
	main()
