package statistics;

import java.io.IOException;
import java.util.Iterator;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import common.ClickWritable;
import common.MetricsRecord;

/**
 * MapReduce job that aggregates click metrics (PV / click / bargain counts)
 * per dimension value. Each input line is parsed into a {@link MetricsRecord};
 * the mapper emits one (dimension:value, ClickWritable) pair per configured
 * dimension, and {@link Reduce} (used as both combiner and reducer) sums them.
 *
 * Usage: -dim &lt;dim1xxxdim2&gt; -i &lt;input&gt; -o &lt;output&gt;
 *        [-mc &lt;map count&gt; -rc &lt;reduce count&gt; -nm &lt;job name&gt;]
 */
public class MetricsBase extends Configured implements Tool {
	
	/** Job-level counters surfaced in the job UI / client output. */
	static enum Counters { PARSE_ERROR , LINE_COUNT, PV_COUNT, CLICK_COUNT, BARGAIN_COUNT, ALL_ASC_COUNT, ALL_ASC_PV_WRONG, PV_WRONG}
	/** JobConf key under which the separator-joined dimension list is passed to mappers. */
	static final String DIMENSION_LIST = "dimensions";
	/** Separator between dimension names inside the -dim argument. */
	static final String DIMENSION_SEP = "xxx";
	
	/**
	 * Parses each input line into a MetricsRecord and emits one
	 * (dimension:value, ClickWritable) pair per configured dimension.
	 * Malformed lines are counted under PARSE_ERROR and skipped.
	 */
	public static class Map extends MapReduceBase implements Mapper<LongWritable, Text, Text, ClickWritable>{
		// Dimension names, lower-cased once in configure() rather than per record.
		private String[] dimensions;
		// Writables are reused across map() calls to avoid per-record allocation
		// (standard Hadoop idiom; collect() serializes immediately).
		private ClickWritable cw = new ClickWritable();
		private Text featureValue = new Text();
		
		@Override
		public void configure(JobConf job){
			String dimList = job.get(DIMENSION_LIST);
			if (dimList == null){
				// Fail fast with a clear message instead of an opaque NPE in split().
				throw new IllegalStateException("JobConf is missing required key: " + DIMENSION_LIST);
			}
			dimensions = dimList.split(DIMENSION_SEP);
			// Lower-case once here instead of on every map() invocation.
			for (int i = 0; i < dimensions.length; ++i){
				dimensions[i] = dimensions[i].toLowerCase();
			}
		}
		
		@Override
		public void map(LongWritable key, Text value, OutputCollector<Text, ClickWritable> output, Reporter reporter) throws IOException{
			
			String line = value.toString();
			MetricsRecord record;
			try{
				record = new MetricsRecord(line);
			}catch (Exception e){
				// Malformed input: count it and skip rather than failing the task.
				reporter.incrCounter(Counters.PARSE_ERROR, 1);
				System.out.println("parse error: " + line);
				return;
			}
			
			cw.setValue(record);
			
			reporter.incrCounter(Counters.LINE_COUNT, 1);
			reporter.incrCounter(Counters.PV_COUNT, cw.getPVCount());
			reporter.incrCounter(Counters.CLICK_COUNT, cw.getClickCount());
			reporter.incrCounter(Counters.BARGAIN_COUNT, cw.getBargainCount());
			// A record with more clicks than page views is inconsistent; compute once.
			boolean pvWrong = record.getPVCount() < record.getClickCount();
			if (pvWrong){
				reporter.incrCounter(Counters.PV_WRONG, 1);
			}
			
			for (String dimension : dimensions){
				String str = record.getValue(dimension);
				// Constant-first comparison: str may be null when the record
				// lacks this dimension (null is tolerated in the emitted key below).
				if (dimension.equals("orderbytype") && "all_asc".equalsIgnoreCase(str)){
					reporter.incrCounter(Counters.ALL_ASC_COUNT, 1);
					if (pvWrong)
						reporter.incrCounter(Counters.ALL_ASC_PV_WRONG, 1);					
				}
				featureValue.set(dimension + ":" + str);
				output.collect(featureValue, cw);
			}			
		}		
	}
	
	/**
	 * Sums the ClickWritable counts for one dimension:value key.
	 * Also used as the combiner, which is safe because addClick() is associative.
	 */
	public static class Reduce extends MapReduceBase implements Reducer<Text, ClickWritable, Text, ClickWritable>{		
		@Override
		public void reduce(Text key, Iterator<ClickWritable> iter, OutputCollector<Text, ClickWritable> output, Reporter reporter) throws IOException{
			ClickWritable clickWritable = new ClickWritable();
			while (iter.hasNext()){				
				clickWritable.addClick(iter.next());
			}
			output.collect(key, clickWritable);
		}
	}
	
	/**
	 * Configures and submits the job. Recognized flags:
	 * -dim (required) dimension list joined by {@link #DIMENSION_SEP},
	 * -i (required) input path, -o (required) output path,
	 * -mc map count, -rc reduce count, -nm job-name suffix.
	 *
	 * @return 0 on successful submission, 1 on invalid arguments
	 */
	@Override
	public int run(String[] args) throws Exception{
		JobConf conf = new JobConf(getConf(), MetricsBase.class);
		conf.setJobName("Metrics Statistics");
		
		conf.setOutputKeyClass(Text.class);
		conf.setOutputValueClass(ClickWritable.class);
		
		conf.setMapperClass(Map.class);
		conf.setCombinerClass(Reduce.class);
		conf.setReducerClass(Reduce.class);
		
		conf.setInputFormat(TextInputFormat.class);
		conf.setOutputFormat(TextOutputFormat.class);
		
		String strDimensions = null;
		String input = null;
		String output = null;
			
		for (int i = 0; i < args.length; ++i){
			// Every recognized flag consumes a value; reject a trailing flag
			// instead of throwing ArrayIndexOutOfBoundsException on args[++i].
			if (i + 1 >= args.length && (args[i].equalsIgnoreCase("-dim")
					|| args[i].equalsIgnoreCase("-i") || args[i].equalsIgnoreCase("-o")
					|| args[i].equalsIgnoreCase("-mc") || args[i].equalsIgnoreCase("-rc")
					|| args[i].equalsIgnoreCase("-nm"))){
				System.out.println("missing value for option " + args[i]);
				return 1;
			}
			if (args[i].equalsIgnoreCase("-dim")){
				strDimensions = args[++i];
			}
			else if (args[i].equalsIgnoreCase("-i")){				
				input = args[++i];
			}
			else if (args[i].equalsIgnoreCase("-o")){
				output = args[++i];
			}
			else if (args[i].equalsIgnoreCase("-mc")){
				conf.setNumMapTasks(Integer.parseInt(args[++i]));
			}
			else if (args[i].equalsIgnoreCase("-rc")){
				conf.setNumReduceTasks(Integer.parseInt(args[++i]));
			} 
			else if (args[i].equalsIgnoreCase("-nm")){
				conf.setJobName("Metrics Statistics" + args[++i]);
			}
		}
		if (strDimensions == null){
			System.out.println("dimension is null!");
			return 1;
		}
		if (input == null){
			System.out.println("input is null");
			return 1;
		}
		if (output == null){
			System.out.println("output is null");
			return 1;
		}
		
		conf.set(DIMENSION_LIST, strDimensions);
		FileInputFormat.setInputPaths(conf, new Path(input));
		FileOutputFormat.setOutputPath(conf, new Path(output));
		
		JobClient.runJob(conf);
		
		return 0;
	}
	
	public static void main(String args[]) throws Exception{
		System.out.println("input parameters: ");
		for (String s : args){
			System.out.println(s);
		}
		
		if (args.length < 6){
			System.out.println(args.length);
			System.out.println("usage: -dim <dim1xxxdim2> -i <input> -o <output> [-mc <map count> -rc <reduce count> -nm <job name>]");
			// Non-zero exit so wrapper scripts can detect the usage error
			// (previously exited 0, masking the failure).
			System.exit(1);
		}
		
		int res = ToolRunner.run(new Configuration(), new MetricsBase(), args);
		System.out.println("job commit status :" + res);
		System.exit(res);
	}
}
