package com.jkxy.hbase2;


import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class HBase2HDFS {
	public static class HbaseReaderMapper extends TableMapper<Text,Text>{

    /**
     * Reads each HBase row, pulls the {@code driverIdCode}, {@code paid} and
     * {@code offTime} columns out of family {@code T1}, and emits
     * {@code (driverIdCode, "paid---offTime")} for rows that have a paid value.
     *
     * @param key     the HBase row key (unused except as the scan cursor)
     * @param value   all cells of the current row
     * @param context MapReduce context used to emit output pairs
     * @throws IOException          on emit failure
     * @throws InterruptedException if the task is interrupted
     */
    @Override
    protected void map(ImmutableBytesWritable key, Result value, Context context)
            throws IOException, InterruptedException {
        String driverIdCode = "";
        String paidStr = "";
        String offTimeStr = "";
        for (Cell cell : value.rawCells()) {
            // Only columns in family T1 are relevant to this job.
            if (!"T1".equals(Bytes.toString(CellUtil.cloneFamily(cell)))) {
                continue;
            }
            String qualifier = Bytes.toString(CellUtil.cloneQualifier(cell));
            if ("driverIdCode".equals(qualifier)) {
                driverIdCode = Bytes.toString(CellUtil.cloneValue(cell));
                // Emit the driver id with an empty value (as the original did via
                // an empty StringBuffer) so every driver id reaches the reducer
                // even when the row has no paid/offTime columns.
                context.write(new Text(driverIdCode), new Text(""));
            } else if ("paid".equals(qualifier)) {
                paidStr = Bytes.toString(CellUtil.cloneValue(cell));
            } else if ("offTime".equals(qualifier)) {
                offTimeStr = Bytes.toString(CellUtil.cloneValue(cell));
            }
        }
        // BUG FIX: the original test used '||' — since paidStr starts as "" and
        // is never null, 'null != paidStr' was always true and the record was
        // emitted unconditionally. Require a non-empty paid value instead.
        if (paidStr != null && !paidStr.isEmpty()) {
            context.write(new Text(driverIdCode), new Text(paidStr + "---" + offTimeStr));
        }
    }
	}
	public static class HbaseReaderReduce extends Reducer<Text,Text,Text,Text>{
	    // Reused output holder; avoids allocating a fresh Text per record.
	    private Text result = new Text();

	    /**
	     * Identity-style reduce: forwards every value for a key to the output
	     * unchanged, one record per value.
	     *
	     * @param key     the driver id emitted by the mapper
	     * @param values  all values grouped under this key
	     * @param context MapReduce context used to write output pairs
	     * @throws IOException          on write failure
	     * @throws InterruptedException if the task is interrupted
	     */
	    @Override
	    protected void reduce(Text key, Iterable<Text> values, Context context)
	            throws IOException, InterruptedException {
	        for (Text value : values) {
	            result.set(value);
	            context.write(key, result);
	        }
	    }
	}
	public static void main(String[] args) throws Exception {
	    // HBase table scanned by the mapper.
	    String tablename = "WYC_DCYY";
	    Configuration conf = HBaseConfiguration.create();
	    // Job.getInstance replaces the deprecated Job(Configuration, String) ctor.
	    Job job = Job.getInstance(conf, "mapReduceTest");
	    job.setJarByClass(HBase2HDFS.class);
	    // HDFS directory that receives the reducer output.
	    FileOutputFormat.setOutputPath(job, new Path("tjtest/output"));
	    job.setReducerClass(HbaseReaderReduce.class);
	    // BUG FIX: without these, the job's final output key class defaults to
	    // LongWritable, and the Text keys written by the reducer fail at runtime
	    // with a wrong-key-class error.
	    job.setOutputKeyClass(Text.class);
	    job.setOutputValueClass(Text.class);
	    Scan scan = new Scan();
	    // Wires the scan + mapper into the job and sets the map output types.
	    TableMapReduceUtil.initTableMapperJob(
	    		tablename,
	    		scan,
	    		HbaseReaderMapper.class,
	    		Text.class, Text.class, job);
	    // Block until the job finishes; exit 0 on success, 1 on failure.
	    System.exit(job.waitForCompletion(true) ? 0 : 1);
	}
}
