package mapredce2;

import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Locale;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import mrutil.MRDPUtils;

public class CountNumUsersByStateDriver {
	public static class CountNumUserByStateMapper extends Mapper<Object, Text, NullWritable, NullWritable> {
		public static final String STATE_COUNTER_CROUP = "State";
		private String[] statesArray = new String[] { "AL", "AK", "AZ", "AR", "CA", "CO", "CT", "DE", "FL", "GA", "HI",
				"ID", "IL", "IN", "IA", "KS", "KY", "LA", "ME", "MD", "MA", "MI", "MN", "MS", "MO", "MT", "NE", "NV",
				"NH", "NJ", "NM", "NY", "NC", "ND", "OH", "OK", "OR", "PA", "RI", "SC", "SF", "TN", "TX", "UT", "VT",
				"VA", "WA", "WV", "WI", "WY" };
		private HashSet<String> states = new HashSet<String>(Arrays.asList(statesArray));

		public void map(Object eky, Text values, Context context) throws IOException, InterruptedException {

			Map<String, String> parsed = MRDPUtils.transformXmlToMap(values.toString());
			String location = parsed.get("Location");
			if (location != null && !location.isEmpty()) {
				boolean unknown = true;
				String[] tokens = location.toUpperCase().split("\\s");
				for (String state : tokens) {
					if (states.contains(state)) {
						context.getCounter(STATE_COUNTER_CROUP, state).increment(1);
						unknown = false;
						break;
					}
				}
				if (unknown) {
					context.getCounter(STATE_COUNTER_CROUP, "Unknown").increment(1);
				}
			} else {
				context.getCounter(STATE_COUNTER_CROUP, "NullOrEmpty").increment(1);
			}

		}

		public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
			Configuration conf = new Configuration();
			@SuppressWarnings("deprecation")
			Job job = new Job(conf, "Count Num Users by State");
			job.setJarByClass(CountNumUsersByStateDriver.class);
			job.setMapperClass(CountNumUserByStateMapper.class);
			job.setNumReduceTasks(0);
			job.setOutputKeyClass(NullWritable.class);
			job.setOutputValueClass(NullWritable.class);
			Path inputPath = new Path("hdfs://192.168.8.70:8020/kxd/example_in");
			Path outPath = new Path("hdfs://192.168.8.70:8020/kxd/example_out");
			FileInputFormat.addInputPath(job, inputPath);
			FileOutputFormat.setOutputPath(job, outPath);
			int code = job.waitForCompletion(true) ? 0 : 1;
			if (code == 0) {
				for (org.apache.hadoop.mapreduce.Counter counter : job.getCounters()
						.getGroup(CountNumUserByStateMapper.STATE_COUNTER_CROUP)) {
					System.out.println(counter.getDisplayName() + "\t" + counter.getValue());

				}
			}
			//FileSystem.get(conf).delete(outPath, true);
			System.exit(code);

		}

	}
}
