#!/bin/bash
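#
# Submits an Elastic MapReduce job for the Akamai log passed as $1 (via
# scriptBigData2.sh), polls the job flow state every 5 minutes, and on
# completion moves the log into $storeLogs and runs DBLoader.sh.
#
# Example invocation (file and script names are placeholders for illustration):
#   sh thisScript.sh /mnt/s3buckets/akamai_logs/example.log
#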
dataImporter=/mnt/s3buckets          # directory containing DBLoader.sh
storeLogs=/mnt/s3buckets/inputLogs/  # where processed input logs are archived
dir=/mnt/s3buckets/akamai_logs       # Akamai log directory (used by the legacy loop below)

# Legacy per-file loop over $dir, kept for reference:
#for f in "$dir"/*; do
#	echo "$f"
#	intermediateFile=${f#*'/'}
#	subIntermediateFile=${intermediateFile#*'/'}
#	lastIntermediateFile=${subIntermediateFile#*'/'}
#	folder=${lastIntermediateFile%'/'*}
#	file=${lastIntermediateFile#*'/'}
#	echo "com.autodesk.akn/$folder/$file"
#	sh scriptBigData.sh com.autodesk.akn/$bucket/$folder
#	process=$(sh root/elastic-mapreduce-ruby/scriptBigData.sh com.autodesk.akn/$bucket/$folder)

# Submit the EMR job for the log file passed as $1 and capture the submission output.
process=$(sh scriptBigData2.sh "$1")

# Extract the job flow ID: keep the text after the first ':' and strip the
# trailing portion from the last space onward.
jobIntermediate=${process#*':'}
jobId=${jobIntermediate%' '*}
echo "JOB ID: $jobId -----------------------------------------------------------"
	
	finalstate="RUNNING"
	echo "Checking state...";

	while [ "$finalstate" == "RUNNING" ]; do
		echo "State: $finalstate";
		sleep 300
		state=$(./elastic-mapreduce --list j-215CCD48B1OAA) 
		cutState=${state#*' '}
		finalstate=${cutState%'ec2'*}
	done
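# A more direct state check (sketch, assuming awk is available and the state is
# the second whitespace-separated field of the --list output):
# finalstate=$(./elastic-mapreduce --list "$jobId" | awk '{print $2; exit}')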
		if [ "$finalstate" == "COMPLETED" ]; then
			echo "Completed.Import to Database";
			#mv $f $storeLogs 
			mv $1 $storedLogs
			echo "Moved: $1 $storeLogs";
			$dataImporter/DBLoader.sh			
		fi
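# Note: states other than COMPLETED (e.g. FAILED or TERMINATED) currently fall
# through silently; a minimal reporting sketch would be:
# [ "$finalstate" != "COMPLETED" ] && echo "Job $jobId ended in state: $finalstate" >&2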
#done



