package com.allen.flink.batch.connector;

import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.hadoop.mapreduce.HadoopOutputFormat;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.configuration.ConfigConstants;
import org.apache.flink.configuration.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.mapreduce.TableOutputFormat;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;

/**
 * Purpose: write a batch of records to HBase using Flink's DataSet API.
 *
 * @date: 2020-03-26 11:25
 * @author: Allen
 * @version: 0.0.4-snapshot
 * @Email: allenZyhang@163.com
 * @since: JDK 1.8
 **/
public class WriteToHBase {
    
    public static void main(String[] args) throws Exception {
        // Obtain the batch (DataSet) execution environment.
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        
        // Sample input rows: (rowKey, name, age, address).
        DataSet<Tuple4<String, String, Integer, String>> users = env.fromElements(Tuple4.of("1000", "Alex", 18, "beijing"), Tuple4.of("1001", "Alan", 20, "shanghai"),
            Tuple4.of("1002", "Mark", 21, "hubei"), Tuple4.of("1003", "Marry", 18, "hunan"), Tuple4.of("1004", "Mars", 18, "tianjin"), Tuple4.of("1005", "Lili", 25, "beijing"));
        
        // Convert each row into the (rowKey, Mutation) pair HBase's OutputFormat expects.
        DataSet<Tuple2<Text, Mutation>> result = convertResultToMutation(users);
        
        org.apache.hadoop.conf.Configuration config = HBaseConfiguration.create();
        
        config.set("hbase.zookeeper.quorum", "master,slave1,slave2");
        config.set("hbase.zookeeper.property.clientPort", "2181");
        config.set(TableOutputFormat.OUTPUT_TABLE, "learing_flink:users");
        // TableOutputFormat extends FileOutputFormat, so an output dir must be set
        // even though no files are written there.
        config.set("mapreduce.output.fileoutputformat.outputdir", "/tmp");
        
        Job job = Job.getInstance(config);
        // Bridge the Hadoop-MapReduce TableOutputFormat into Flink's sink API.
        result.output(new HadoopOutputFormat<>(new TableOutputFormat<>(), job));
        env.execute(" write to hbase");
    }
    
    /**
     * Maps each user tuple to a {@code (rowKey, Put)} pair suitable for HBase's
     * {@link TableOutputFormat}. The tuple's first field becomes the row key;
     * the remaining fields are written as columns {@code name}, {@code age} and
     * {@code address} under column family {@code F}. Null fields are skipped.
     *
     * @param user dataset of (rowKey, name, age, address) tuples
     * @return dataset of (rowKey, Mutation) pairs ready for output
     */
    public static DataSet<Tuple2<Text, Mutation>> convertResultToMutation(DataSet<Tuple4<String, String, Integer, String>> user) {
        return user.map(new RichMapFunction<Tuple4<String, String, Integer, String>, Tuple2<Text, Mutation>>() {
            
            // Output tuple reused across map() calls to avoid per-record allocation;
            // transient because it is (re)created in open() on each task instance.
            private transient Tuple2<Text, Mutation> resultTp;
            
            // Column family shared by all written columns.
            private final byte[] cf = "F".getBytes(ConfigConstants.DEFAULT_CHARSET);
            
            @Override
            public Tuple2<Text, Mutation> map(final Tuple4<String, String, Integer, String> user) throws Exception {
                resultTp.f0 = new Text(user.f0);
                Put put = new Put(user.f0.getBytes(ConfigConstants.DEFAULT_CHARSET));
                if (null != user.f1) {
                    put.addColumn(cf, Bytes.toBytes("name"), Bytes.toBytes(user.f1));
                }
                // Guard age like the other optional fields: the original called
                // user.f2.toString() unconditionally and would NPE on a null age.
                if (null != user.f2) {
                    put.addColumn(cf, Bytes.toBytes("age"), Bytes.toBytes(user.f2.toString()));
                }
                if (null != user.f3) {
                    put.addColumn(cf, Bytes.toBytes("address"), Bytes.toBytes(user.f3));
                }
                resultTp.f1 = put;
                return resultTp;
            }
            
            @Override
            public void open(final Configuration parameters) throws Exception {
                // Allocate the reusable output tuple once per task.
                resultTp = new Tuple2<>();
            }
        });
    }
}
