package com.zhangwoo.analyser.jobs.matcher;

import java.io.IOException;
import java.util.Iterator;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;
import org.apache.log4j.Logger;

/**
 * Map-side worker: feeds every input line to {@code TextMatcherWithKPEO}
 * together with the job parameters set by the driver. Nothing is written to
 * the {@code OutputCollector} — the matcher presumably persists its results
 * as a side effect (TODO confirm against TextMatcherWithKPEO).
 */
class MyLastHourDataAnalyserMapper extends MapReduceBase implements
		Mapper<LongWritable, Text, Text, Text> {
	// Captured in configure(); supplies strategy/good/bad/other/class values.
	JobConf job=null;
	private Logger logger = Logger.getLogger(getClass());

	@Override
	public void configure(JobConf job) {
		this.job=job;
	}

	/**
	 * Processes one input line. Any failure is logged (with the input line
	 * for context) and the record is skipped, so a single bad line does not
	 * kill the task.
	 */
	public void map(LongWritable key, Text value,
			OutputCollector<Text, Text> output, Reporter reporter)
			throws IOException {
		try {
			new TextMatcherWithKPEO().processText(job.get("strategy"),
					job.get("good"), job.get("bad"), job.get("other"),
					value.toString(), job.get("class"));
		} catch (Exception e) {
			// BUG FIX: the original logged only the message and dropped the
			// throwable, losing the stack trace. Pass the cause to log4j.
			logger.error(value.toString() + " analyse error", e);
		}
	}
}

/**
 * Placeholder reducer. The driver configures the job with zero reduce tasks,
 * making this a map-only job, so this class is never actually executed.
 */
class MyLastHourDataAnalyserReducer extends MapReduceBase implements
		Reducer<Text, Text, Text, Text> {

	/** Intentionally a no-op: the reduce phase is disabled for this job. */
	public void reduce(Text key, Iterator<Text> values,
			OutputCollector<Text, Text> output, Reporter reporter)
			throws IOException {
		// Deliberately empty — nothing is emitted from the reduce side.
	}
}

/**
 * Driver for the last-hour data analysis job.
 *
 * Expected arguments:
 *   args[0] strategy, args[1] good, args[2] bad, args[3] other, args[4] class,
 *   args[5..] one or more input paths.
 * Output goes to {@code result/<timestamp>} and the job runs map-only.
 */
public class MyLastHourDataAnalyser {
	/**
	 * Configures and submits the job.
	 *
	 * @param args see class javadoc
	 * @throws IOException if job submission fails
	 */
	public static void main(String[] args) throws IOException {
		// BUG FIX: the original checked only args.length == 0 and then read
		// args[1]..args[4] unconditionally, so invoking with 1-4 arguments
		// threw ArrayIndexOutOfBoundsException. Require all five parameters.
		if (args.length < 5) {
			System.err.println("usage: MyLastHourDataAnalyser "
					+ "<strategy> <good> <bad> <other> <class> [inputPath...]");
			System.exit(-1);
		}

		// Single timestamp so the job name and output directory agree.
		long timestamp = System.currentTimeMillis();

		JobConf conf = new JobConf(MyLastHourDataAnalyser.class);
		conf.setJobName("Analyser_of_LastHour@" + timestamp);
		// Parameters consumed by the mapper via JobConf.get(...).
		conf.set("strategy", args[0]);
		conf.set("good", args[1]);
		conf.set("bad", args[2]);
		conf.set("other", args[3]);
		conf.set("class", args[4]);
		// NOTE(review): hard-coded cluster-local jar path — consider making
		// this configurable.
		addTmpJar("/hadoop/hadoop-1.0.3/lib/gson-2.2.2.jar", conf);
		// Remaining arguments are input paths.
		for (int i = 5; i < args.length; i++) {
			FileInputFormat.addInputPaths(conf, args[i]);
		}
		FileOutputFormat.setOutputPath(conf, new Path("result/" + timestamp));

		conf.setMapperClass(MyLastHourDataAnalyserMapper.class);
		conf.setReducerClass(MyLastHourDataAnalyserReducer.class);
		// Map-only job: the reducer above is never executed.
		conf.setNumReduceTasks(0);

		conf.setInputFormat(TextInputFormat.class);
		conf.setOutputFormat(TextOutputFormat.class);
		conf.setOutputKeyClass(Text.class);
		conf.setOutputValueClass(Text.class);
		JobClient.runJob(conf);
	}

	/**
	 * Ships a third-party jar to the cluster by appending it to the job's
	 * {@code tmpjars} property (the DistributedCache jar list).
	 *
	 * @param jarPath local path to the jar,
	 *                e.g. {@code D:/Java/new_java_workspace/scm/lib/guava-r08.jar}
	 * @param conf    job configuration to update
	 * @throws IOException if the local filesystem cannot be accessed
	 */
	public static void addTmpJar(String jarPath, Configuration conf) throws IOException {
		// Force the Unix path separator so tmpjars parsing works when the
		// driver is launched from Windows (where the default is ';').
		System.setProperty("path.separator", ":");
		FileSystem fs = FileSystem.getLocal(conf);
		String newJarPath = new Path(jarPath).makeQualified(fs).toString();
		String tmpjars = conf.get("tmpjars");
		if (tmpjars == null || tmpjars.length() == 0) {
			conf.set("tmpjars", newJarPath);
		} else {
			// tmpjars is a comma-separated list; append rather than overwrite.
			conf.set("tmpjars", tmpjars + "," + newJarPath);
		}
	}
}