package com.hdaccp.hadoop.mrhbase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableOutputFormat;
import org.apache.hadoop.hbase.mapreduce.TableInputFormat;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
public class TableMapperDemo {

	/**
	 * Mapper that scans rows of the HBase "music" table.
	 * For a table with 2 rows, {@link #map} is invoked twice — once per row.
	 * Each cell of the row is formatted into a human-readable line and
	 * emitted under the constant key {@code "hi--"}.
	 */
	static class MyMapper extends TableMapper<Text, Text> {

		/**
		 * Emits one formatted line per cell of the current row.
		 *
		 * @param key     the row key of the current HBase row
		 * @param value   all cells of the row selected by the job's Scan
		 * @param context MapReduce context used to emit output pairs
		 * @throws IOException          on HBase/HDFS I/O failure
		 * @throws InterruptedException if the task is interrupted
		 */
		@Override
		protected void map(ImmutableBytesWritable key, Result value,
				Context context) throws IOException, InterruptedException {
			for (Cell cell : value.listCells()) {
				String outValue = String.format("RowKey:%s    Family:%s    Qualifier:%s    cellValue:%s  ", 
						Bytes.toString(key.get()),
						Bytes.toString(CellUtil.cloneFamily(cell)),
						Bytes.toString(CellUtil.cloneQualifier(cell)),
						Bytes.toString(CellUtil.cloneValue(cell)));
				// NOTE: the output key is a constant literal, so every cell of
				// every row groups together — it is NOT the row key or column key.
				context.write(new Text("hi--"), new Text(outValue));
			}
		}
	}

	/**
	 * Configures and runs the HBase-reading MapReduce job: scans selected
	 * "info" columns of the "music" table and writes the mapper output to
	 * {@code /ch05/out2} on HDFS (any pre-existing output dir is deleted).
	 *
	 * @param args command-line arguments (currently unused)
	 * @throws IOException            on HDFS/HBase I/O failure
	 * @throws ClassNotFoundException if job classes cannot be resolved
	 * @throws InterruptedException   if the job is interrupted while running
	 */
	public static void m1(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
		// Loads hbase-site.xml from the classpath resources by default.
		Configuration conf = HBaseConfiguration.create();

		// Declare the job.
		Job job = Job.getInstance(conf, "hbase-mapreduce-api");

		// Basic MapReduce job configuration.
		job.setJarByClass(TableMapperDemo.class);

		// Remove a stale output directory, otherwise the job fails at submit.
		// Reuse a single FileSystem handle instead of looking it up twice.
		Path output = new Path("/ch05/out2");
		FileSystem fs = FileSystem.get(conf);
		if (fs.exists(output)) {
			fs.delete(output, true);
		}
		FileOutputFormat.setOutputPath(job, output);

		// Single reducer so all mapper output lands in one file.
		job.setNumReduceTasks(1);

		// Restrict the scan to the columns this demo cares about.
		Scan scan = new Scan();
		scan.addColumn(Bytes.toBytes("info"), Bytes.toBytes("name"));
		scan.addColumn(Bytes.toBytes("info"), Bytes.toBytes("singer"));
		scan.addColumn(Bytes.toBytes("info"), Bytes.toBytes("gender"));
		scan.addColumn(Bytes.toBytes("info"), Bytes.toBytes("ryghme"));
		scan.addColumn(Bytes.toBytes("info"), Bytes.toBytes("terminal"));

		// Use the HBase helper to wire the table scan into the job
		// (last arg false: don't ship dependency jars to the cluster).
		TableMapReduceUtil.initTableMapperJob("music", scan, MyMapper.class, Text.class, Text.class, job, false);

		// Submit the job and surface failure instead of silently ignoring it.
		boolean success = job.waitForCompletion(true);
		if (!success) {
			throw new IOException("Job 'hbase-mapreduce-api' failed");
		}
	}

	/** Entry point — delegates to {@link #m1(String[])}. */
	public static void main(String[] args) throws IOException,
			ClassNotFoundException, InterruptedException {
		m1(args);
	}
}
