package com.event.ingestion.common;

import com.event.ingestion.config.LoadConfig;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;

import java.util.ArrayList;
import java.util.List;
import java.util.Properties;

/**
 * Persists Kafka consumer records into an HBase table: each record value is split
 * on commas, validated and converted to a {@link org.apache.hadoop.hbase.client.Put}
 * by the configured parser, then written as a single batch.
 *
 * @author ZzHh
 * @since 2020/06/18 11:35
 **/


public class HBaseWriter implements Persistable {
    // Path to core-site.xml (optional Hadoop core configuration).
    private String coreSite = null;

    // Path to hdfs-site.xml (optional HDFS configuration).
    private String hdfsSite = null;

    // Path to hbase-site.xml (required; write() fails without it).
    private String hbaseSite = null;

    // Name of the destination HBase table.
    private String hbTable = null;

    // Parser that validates split CSV fields and converts them to Put objects.
    private Parsable<Put> parser = null;

    /**
     * Creates a writer targeting the given HBase table.
     *
     * @param hbTable name of the destination HBase table
     * @param parser  parser used to validate and convert split CSV fields into {@link Put}s
     */
    public HBaseWriter(String hbTable, Parsable<Put> parser) {
        this.hbTable = hbTable;
        this.parser = parser;
    }

    /**
     * Loads the Hadoop/HBase configuration-file paths from the supplied properties.
     * Must be called before {@link #write(ConsumerRecords)}.
     *
     * @param properties properties holding config-file locations under the {@link LoadConfig} keys
     */
    @Override
    public void initialize(Properties properties) {
        this.coreSite = properties.getProperty(LoadConfig.coreSite);
        this.hdfsSite = properties.getProperty(LoadConfig.hdfsSite);
        this.hbaseSite = properties.getProperty(LoadConfig.hbaseSite);
    }

    /**
     * Parses the given Kafka records (comma-separated values), skips at most one
     * header row, and writes all valid rows to HBase as a single batched put.
     *
     * <p>NOTE(review): a fresh {@link Connection} is opened and closed on every call;
     * if this method runs per poll-loop iteration, consider caching the connection
     * at a higher level.
     *
     * @param records Kafka records whose values are comma-separated fields
     * @return the number of rows written to HBase
     * @throws IllegalStateException if {@link #initialize(Properties)} did not supply hbase-site.xml
     * @throws Exception             if connecting to or writing to HBase fails
     */
    @Override
    public int write(ConsumerRecords<String, String> records) throws Exception {
        if (this.hbaseSite == null || this.hbaseSite.isEmpty()) {
            // Precondition violation: initialize(...) was not called with a valid path.
            throw new IllegalStateException("hbase-site.xml 未被初始化!");
        }

        // Assemble the HBase configuration from the provided resource files.
        Configuration configuration = HBaseConfiguration.create();
        if (this.coreSite != null) {
            configuration.addResource(new Path(this.coreSite));
        }
        if (this.hdfsSite != null) {
            configuration.addResource(new Path(this.hdfsSite));
        }
        configuration.addResource(new Path(this.hbaseSite));

        // try-with-resources guarantees table and connection are closed (in reverse
        // order) even when parsing or the put fails.
        try (Connection connection = ConnectionFactory.createConnection(configuration);
             Table table = connection.getTable(TableName.valueOf(this.hbTable))) {
            List<Put> puts = new ArrayList<>();
            boolean headerSkipped = false;
            for (ConsumerRecord<String, String> record : records) {
                // Split on commas; limit -1 preserves trailing empty fields.
                String[] elements = record.value().split(",", -1);
                // Skip at most one header row, wherever it first appears.
                if (!headerSkipped && this.parser.isHeader(elements)) {
                    headerSkipped = true;
                    continue;
                }
                // Only records the parser accepts are converted and written.
                if (this.parser.isValid(elements)) {
                    puts.add(this.parser.parse(elements));
                }
            }
            if (!puts.isEmpty()) {
                // Single batched write for the whole poll.
                table.put(puts);
            }
            return puts.size();
        }
    }
}