import java.io.BufferedReader;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.mapreduce.Counter;

import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.services.s3.AmazonS3Client;
import com.amazonaws.services.s3.model.CannedAccessControlList;
import com.amazonaws.services.s3.model.GetObjectRequest;
import com.amazonaws.services.s3.model.PutObjectRequest;
import com.amazonaws.services.s3.model.S3Object;


/**
 * S3 helper utilities for the MapReduce job: persists per-step record
 * counters as small text files in S3 and sums them back up at the end of
 * the run.
 *
 * <p>Counter file format (one line): {@code <stepClassName>: <displayName>: <value>}
 */
public class Utils {
	// NOTE(review): kept lowerCamelCase (convention would be BUCKET_NAME)
	// because external callers may reference Utils.bucketName directly.
	public static final String bucketName = "ahgass2";

	// Every step whose counter file participates in the final sum.
	private static final Class<?>[] COUNTED_STEPS = {
			FirstStep.class, SecondStep.class, ThirdStep.class, FourthStep.class, PartA.class
	};

	/**
	 * Writes the given map-output counter to a local text file named after the
	 * step class, then uploads that file to S3.
	 *
	 * @param mapOutputCounter the Hadoop counter whose value is recorded
	 * @param c                the step class this counter belongs to (used in the key)
	 * @param credentials      AWS credentials for the upload
	 * @throws IOException if the local file cannot be written
	 */
	public static void writeToS3(Counter mapOutputCounter, Class<?> c, AWSCredentials credentials) throws IOException {
		String counterFileName = JobBuilder.counterFileName + c.getName() + ".txt";
		File f = new File(counterFileName);
		// try-with-resources guarantees the writer is flushed and closed even
		// if write() throws (the original leaked the FileWriter on failure).
		// UTF-8 keeps the file readable regardless of the platform charset.
		try (Writer writer = new OutputStreamWriter(new FileOutputStream(f), StandardCharsets.UTF_8)) {
			writer.write(c.getName() + ": " + mapOutputCounter.getDisplayName() + ": " + mapOutputCounter.getValue());
		}
		putFileToS3(counterFileName, f, credentials);
	}

	/**
	 * Uploads a local file to the job bucket under the given key, publicly
	 * readable so other cluster nodes can fetch it without credentials.
	 */
	public static void putFileToS3(String counterFileName, File f, AWSCredentials credentials) {
		AmazonS3Client s3Client = new AmazonS3Client(credentials);
		s3Client.putObject(new PutObjectRequest(bucketName, counterFileName, f)
				.withCannedAcl(CannedAccessControlList.PublicRead));
	}

	/**
	 * Downloads every step's counter file from S3, sums the recorded values,
	 * and prints the total. A missing or malformed counter file contributes 0
	 * rather than aborting the summary.
	 */
	public static void sumAllCounters(AWSCredentials credentials) {
		// long, not int: Hadoop Counter values are longs and the per-step
		// record counts can exceed Integer.MAX_VALUE on large inputs.
		long recordCounter = 0;
		for (Class<?> step : COUNTED_STEPS) {
			String key = JobBuilder.counterFileName + step.getName() + ".txt";
			S3Object counterFile = getObjectFromS3ByKey(key, credentials);
			// Close the reader (and thus the S3 object stream): the AWS SDK
			// holds an open HTTP connection until the content is closed — the
			// original leaked one connection per step.
			try (BufferedReader br = new BufferedReader(
					new InputStreamReader(counterFile.getObjectContent(), StandardCharsets.UTF_8))) {
				recordCounter += readCounterFromFile(br);
			} catch (IOException e) {
				// Best-effort aggregation: report and keep summing the rest.
				e.printStackTrace();
			}
		}
		System.out.println("Total Records passed between the mappers to the reducers in all the steps are : " + recordCounter);
	}

	/** Fetches a single object from the job bucket by key. */
	public static S3Object getObjectFromS3ByKey(String key, AWSCredentials credentials) {
		AmazonS3Client s3Client = new AmazonS3Client(credentials);
		return s3Client.getObject(new GetObjectRequest(bucketName, key));
	}

	/**
	 * Parses one counter file line of the form
	 * {@code <className>: <displayName>: <value>} and returns the value,
	 * or 0 if the file is empty or malformed.
	 */
	private static long readCounterFromFile(BufferedReader br) {
		try {
			String line = br.readLine();
			if (line == null) {
				return 0; // empty counter file
			}
			String[] words = line.split(": ");
			if (words.length < 2) {
				return 0; // not in "<name>: <display>: <value>" form
			}
			// Take the LAST field: the class or display name could itself
			// contain ": " (the original's hard-coded words[2] would then
			// pick the wrong field or throw).
			return Long.parseLong(words[words.length - 1].trim());
		} catch (IOException | NumberFormatException e) {
			// Narrowed from catch(Exception): only the failures this parse
			// can actually produce; a bad file counts as 0.
			e.printStackTrace();
			return 0;
		}
	}

}
