package com.demo.yarn.mr2;

import java.io.IOException;
import java.util.Calendar;
import java.util.Date;

import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

public class MapReduceCaseFilte extends Configured implements Tool {

	/**
	 * Map-only filter: keeps fields 0, 1, 2 and 6 of each space-delimited
	 * input line and emits them as one space-joined value under a null key,
	 * so the job's output contains just the filtered lines.
	 */
	public static class FilterMapper extends Mapper<Object, Text, NullWritable, Text> {
		@Override
		protected void map(Object key, Text value, Context context)
				throws IOException, InterruptedException {
			// Split the record on single spaces.
			String[] fields = value.toString().split(" ");
			// Skip malformed/short records instead of killing the task with an
			// ArrayIndexOutOfBoundsException when fields[6] does not exist.
			if (fields.length < 7) {
				return;
			}
			String filtered = fields[0] + " " + fields[1] + " " + fields[2] + " " + fields[6];
			context.write(NullWritable.get(), new Text(filtered));
		}
	}

	/**
	 * Configures and submits the filter job and waits for completion.
	 *
	 * @param args args[0] = input path; args[1] = output path prefix, to which a
	 *             timestamp suffix is appended so repeated runs do not collide
	 * @return 0 if the job succeeded, 1 otherwise
	 * @throws Exception if job setup or submission fails
	 */
	@Override
	public int run(String[] args) throws Exception {
		Job job = Job.getInstance(getConf(), "mrfilter");
		job.setJarByClass(MapReduceCaseFilte.class);

		// Map-only job: set the mapper class.
		job.setMapperClass(FilterMapper.class);

		// Output key/value types emitted by the mapper.
		job.setOutputKeyClass(NullWritable.class);
		job.setOutputValueClass(Text.class);

		// Input/output paths from the command line. Suffix the output path with
		// the full epoch-millisecond timestamp: the previous code used
		// Calendar.MILLISECOND, which is only the millisecond WITHIN the current
		// second (0-999) and collides across runs.
		FileInputFormat.addInputPath(job, new Path(args[0]));
		FileOutputFormat.setOutputPath(job, new Path(args[1] + System.currentTimeMillis()));

		// Submit the job to the cluster and block until it finishes.
		boolean isSuccess = job.waitForCompletion(true);

		return isSuccess ? 0 : 1;
	}

	public static void main(String[] args) throws Exception {
		int res = ToolRunner.run(new MapReduceCaseFilte(), args);
		System.exit(res);
	}
}

// Scratch notes (anagram-grouping example, unrelated to this filter job):
// sorting the letters of "cat" gives "act"; sorting "tar" gives "art";
// grouping words by their sorted-letter key yields pairs like
// <act, {cat, tac, cta}>





