package demo.test;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.lib.ChainMapper;
import org.apache.hadoop.mapred.lib.MultipleInputs;

import demo.mapper.BaseChainMap;
import demo.mapper.BaseChainMap2;
import demo.mapper.BaseChainMap3;
import demo.mapper.MultFirstMapper;
import demo.mapper.MultSecondMapper;
import demo.reducer.BaseChainReduce;
import demo.reducer.BaseReducer;

/**
 * Driver for a chained-map Hadoop job (old {@code org.apache.hadoop.mapred} API).
 *
 * <p>Pipeline: two input files are read via {@link MultipleInputs}; the first is
 * processed by a two-stage {@link ChainMapper} chain
 * {@code (LongWritable, Text) -> BaseChainMap -> (Text, IntWritable) -> BaseChainMap2 -> (IntWritable, Text)},
 * the second directly by {@code BaseChainMap3}. Results are reduced by
 * {@code BaseChainReduce} and written as {@code (Text, Text)}.
 */
public class BaseJobConf {
	public static void main(String[] args) throws Exception {
		Configuration cfg = new Configuration();
		JobConf jc = new JobConf(cfg, BaseJobConf.class);
		jc.setJobName("base-chain-job");

		// Private per-mapper configuration; 'false' = do not load default resources,
		// so chain-local settings cannot leak into the job-wide conf.
		JobConf mapperConf = new JobConf(false);

		// Chain stage 1: (LongWritable, Text) -> (Text, IntWritable).
		ChainMapper.addMapper(jc, BaseChainMap.class, LongWritable.class, Text.class, Text.class, IntWritable.class,
				true, mapperConf);
		// Chain stage 2: (Text, IntWritable) -> (IntWritable, Text).
		ChainMapper.addMapper(jc, BaseChainMap2.class, Text.class, IntWritable.class, IntWritable.class, Text.class,
				true, mapperConf);

		// Two inputs: the first file runs through the ChainMapper chain configured
		// above, the second through BaseChainMap3 on its own.
		MultipleInputs.addInputPath(jc, new Path("c:/demo.txt"), TextInputFormat.class, ChainMapper.class);
		MultipleInputs.addInputPath(jc, new Path("c:/xxx.txt"), TextInputFormat.class, BaseChainMap3.class);

		// BUG FIX: the final chain stage emits (IntWritable, Text). Without explicit
		// map-output classes Hadoop defaults them to the job output classes
		// (Text/Text), causing a key-type mismatch in the shuffle.
		// NOTE(review): this assumes BaseChainMap3 also emits (IntWritable, Text) --
		// confirm against its declaration; both input branches must agree.
		jc.setMapOutputKeyClass(IntWritable.class);
		jc.setMapOutputValueClass(Text.class);

		jc.setReducerClass(BaseChainReduce.class);
		// Final (reduce) output types.
		jc.setOutputKeyClass(Text.class);
		jc.setOutputValueClass(Text.class);

		// Output directory must not already exist or the job will fail on submit.
		FileOutputFormat.setOutputPath(jc, new Path("c:/xx"));
		JobClient.runJob(jc);
	}
}
