package com.song.hbasechap_7_6;

import javax.ws.rs.PUT;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableOutputFormat;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

	/**
	 * Map-only MapReduce driver that imports text data from HDFS
	 * ({@code hdfs://macsong:9000/chap7_6/data}) into the HBase table
	 * {@code data_char_7_6} via {@link TableOutputFormat}.
	 *
	 * @author song
	 */
	public class ImportMain extends Configured implements Tool {

		/**
		 * Entry point. Runs the tool through {@link ToolRunner} so generic
		 * Hadoop options ({@code -D key=value}, {@code -conf}, ...) are parsed,
		 * and propagates the job's exit status to the shell.
		 *
		 * @param args command-line arguments forwarded to {@link #run(String[])}
		 */
		public static void main(String[] args) {
			Configuration conf = new Configuration();
			try {
				// BUG FIX: the return value of ToolRunner.run was previously
				// discarded; propagate it so scripts can detect job failure.
				int exitCode = ToolRunner.run(conf, new ImportMain(), args);
				System.exit(exitCode);
			} catch (Exception e) {
				e.printStackTrace();
				System.exit(1);
			}
		}

		/**
		 * Configures and submits the import job.
		 *
		 * @param args unused; input path and table name are currently hard-coded
		 * @return 0 on success, 1 if the job fails
		 * @throws Exception if job submission or execution fails
		 */
		@Override
		public int run(String[] args) throws Exception {
			// BUG FIX: seed the HBase configuration from getConf() instead of
			// creating a fresh one, so ToolRunner-parsed generic options
			// (-D key=value) are not silently discarded.
			Configuration conf = HBaseConfiguration.create(getConf());
			conf.set("hbase.master", "macsong:60000");
			conf.set("hbase.zookeeper.quorum", "macsong");

			Job job = new Job(conf, "data_char_7_6 -import data");
			job.setJarByClass(ImportMain.class);
			job.setMapperClass(ImportDataMapper.class);
			job.setOutputFormatClass(TableOutputFormat.class);
			job.getConfiguration().set(TableOutputFormat.OUTPUT_TABLE, "data_char_7_6");
			job.setOutputKeyClass(ImmutableBytesWritable.class);
			// BUG FIX: the output value must be the HBase client mutation class
			// org.apache.hadoop.hbase.client.Put — the original used the
			// unrelated JAX-RS annotation type javax.ws.rs.PUT, which
			// TableOutputFormat cannot serialize or write.
			job.setOutputValueClass(Put.class);
			// Map-only job: the mapper emits Puts directly to HBase.
			job.setNumReduceTasks(0);

			FileInputFormat.addInputPath(job, new Path("hdfs://macsong:9000/chap7_6/data"));

			return job.waitForCompletion(true) ? 0 : 1;
		}

	}
