package org.srilankaliving.analytics;


import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Iterator;


public class EthnicGroupAnalyzer extends Configured implements Tool {
    public static final String SINHALA = "sinhalese";
    public static final String SLTAMIL = "sriLankaTamil";
    public static final String INDTAMIL = "indianTamil";
    public static final String SLMOOR = "sriLankaMoor";
    public static final String BURGER = "burgher";
    public static final String MALAY = "Malay";
    public static final String SLCHETTY = "sriLankaChetty";
    public static final String BHARATHA = "bharatha";
    public static final String OTHER = "other";

    /**
     * Maps one census row to a (townName, dominantEthnicity) pair.
     *
     * <p>Expected row layout (space-separated): one or more town-name tokens
     * followed by exactly 10 comma-grouped integers — the row total and then
     * the nine ethnic-group columns (sinhalese ... other), matching the
     * column order assumed by {@code getLargestPopulation}. Rows with fewer
     * than 11 tokens are silently skipped.
     */
    public static class EthMapClass extends MapReduceBase
            implements Mapper<LongWritable, Text, Text, Text> {

        // Number of numeric columns at the end of each row (total + 9 groups).
        private static final int FIGURE_COLUMNS = 10;

        public void map(LongWritable key, Text value,
                        OutputCollector<Text, Text> output,
                        Reporter reporter) throws IOException {
            String[] rowData = value.toString().split(" ");
            // Need at least one name token plus the 10 figure columns.
            if (rowData.length < FIGURE_COLUMNS + 1) {
                return;
            }

            // Everything before the figure columns is the (possibly
            // multi-word) town name.
            int firstFigure = rowData.length - FIGURE_COLUMNS;
            StringBuilder townName = new StringBuilder(rowData[0]);
            for (int i = 1; i < firstFigure; i++) {
                townName.append(' ').append(rowData[i]);
            }

            // Normalize the figures into a fixed-size array so that index 0
            // is always the total and 1..9 the ethnic groups, regardless of
            // how many words the town name has. (The previous code stored
            // them at index-1, which mis-aligned the columns for multi-word
            // town names and made the total column win as "sinhalese".)
            int[] populationFigures = new int[FIGURE_COLUMNS];
            try {
                for (int index = firstFigure; index < rowData.length; index++) {
                    populationFigures[index - firstFigure] =
                            Integer.parseInt(rowData[index].replace(",", ""));
                }
            } catch (NumberFormatException ignored) {
                // Malformed figure: skip the row instead of failing the task.
                reporter.setStatus("Skipping malformed row: " + rowData[0]);
                return;
            }

            output.collect(new Text(townName.toString()),
                    new Text(getLargestPopulation(populationFigures)));
        }
    }

    /**
     * Emits the first value seen for each town key. Each mapper emits at
     * most one ethnicity per town, so "first value" is the whole answer;
     * being idempotent, this class also serves as the job's combiner.
     */
    public static class EthReduce extends MapReduceBase
            implements Reducer<Text, Text, Text, Text> {

        public void reduce(Text townName, Iterator<Text> ethnicities,
                           OutputCollector<Text, Text> output,
                           Reporter reporter) throws IOException {
            // Forward only the first ethnicity; ignore any duplicates.
            if (ethnicities.hasNext()) {
                output.collect(townName, new Text(ethnicities.next().toString()));
            }
        }
    }

    /**
     * Returns the name of the ethnic group with the largest population in
     * the given row of figures.
     *
     * <p>Column 0 holds the row total and is deliberately skipped; columns
     * 1..8 map to the named groups and anything beyond to {@code OTHER}.
     * On ties the later column wins (the scan uses {@code >=}).
     *
     * @param array normalized figure columns; must have at least 2 entries
     * @return one of the ethnicity constants declared on this class
     */
    private static String getLargestPopulation(int[] array) {
        // Track only the winning column; array[winner] is always the max so far.
        int winner = 1;
        for (int col = 2; col < array.length; col++) {
            if (array[col] >= array[winner]) {
                winner = col;   // ties resolve to the later column
            }
        }
        switch (winner) {
            case 1:  return SINHALA;
            case 2:  return SLTAMIL;
            case 3:  return INDTAMIL;
            case 4:  return SLMOOR;
            case 5:  return BURGER;
            case 6:  return MALAY;
            case 7:  return SLCHETTY;
            case 8:  return BHARATHA;
            default: return OTHER;
        }
    }


    /**
     * Configures and runs the MapReduce job that reads the ethnic-group
     * census file and writes (town, dominant ethnicity) pairs.
     *
     * @param args unused; input/output paths come from {@link QOLUtils}
     * @return 0 on successful job completion
     * @throws Exception if the job fails
     */
    public int run(String[] args) throws Exception {
        JobConf conf = new JobConf(EthnicGroupAnalyzer.class);
        // Was "wordcount" — a copy/paste leftover from the WordCount example
        // that mislabeled this job in the cluster UI.
        conf.setJobName("ethnicGroupAnalyzer");

        conf.setOutputKeyClass(Text.class);
        conf.setOutputValueClass(Text.class);
        conf.setMapperClass(EthMapClass.class);
        // EthReduce forwards only the first value per key, so applying it
        // again as a combiner does not change the final output.
        conf.setCombinerClass(EthReduce.class);
        conf.setReducerClass(EthReduce.class);

        FileInputFormat.setInputPaths(conf, new Path(QOLUtils.ETHINIC_GR_PATH));
        FileOutputFormat.setOutputPath(conf, new Path(QOLUtils.ETHINIC_GR_OUT_PATH));

        JobClient.runJob(conf);
        return 0;
    }

    /**
     * Raw comparator that orders serialized {@link IntWritable} values in
     * descending numeric order.
     */
    public static class IntComparator extends WritableComparator {

        public IntComparator() {
            super(IntWritable.class);
        }

        /**
         * Decodes the big-endian int at the start of each buffer and
         * compares them with the operands reversed, yielding a descending
         * sort without boxing.
         */
        @Override
        public int compare(byte[] b1, int s1, int l1,
                           byte[] b2, int s2, int l2) {
            int left = ByteBuffer.wrap(b1, s1, l1).getInt();
            int right = ByteBuffer.wrap(b2, s2, l2).getInt();
            // Reversed operand order == descending order.
            return Integer.compare(right, left);
        }
    }


    /**
     * Entry point: runs the analyzer through {@link ToolRunner} so generic
     * Hadoop options are parsed, then exits with the job's status code.
     */
    public static void main(String[] args) throws Exception {
        Tool analyzer = new EthnicGroupAnalyzer();
        int status = ToolRunner.run(new Configuration(), analyzer, args);
        System.exit(status);
    }


}
