package org.myfram.flink.flinkonjar.domain.entity.KafkaJoinHBase;

import java.io.Serializable;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;
import org.myfram.flink.flinkonjar.common.connector.hbase.HBaseTableDefinition;

/**
 * Sink-side table definition mapping JSON records onto the HBase table
 * {@code test_flink_jar_sink}. Each incoming record is a JSON string with
 * {@code id} and {@code name} fields; {@code id} doubles as the row key.
 */
public class HBaseSinkTable extends HBaseTableDefinition<String> implements Serializable {

    private static final long serialVersionUID = 1L;

    // ObjectMapper is thread-safe once configured; cache one instance for the class.
    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

    // Pre-encoded family/qualifier bytes so we don't re-encode them per record.
    private static final byte[] CF_INFO = Bytes.toBytes("info");
    private static final byte[] QUAL_ID = Bytes.toBytes("id");
    private static final byte[] QUAL_NAME = Bytes.toBytes("name");

    /** A no-arg constructor is required; wires this definition to the target table name. */
    public HBaseSinkTable() {
        super("test_flink_jar_sink"); // target HBase table name
    }

    /** Declares the sink schema: column family "info" with raw-byte columns "id" and "name". */
    @Override
    protected void defineSchema() {
        addColumn("info", "id", byte[].class);
        addColumn("info", "name", byte[].class);
    }

    /**
     * Converts one JSON record into an HBase {@link Put} keyed by the record's {@code id}.
     *
     * @param value JSON string expected to contain "id" and "name" fields
     * @return a {@link Put} writing "info:id" and "info:name" for row key {@code id}
     * @throws RuntimeException if {@code value} is not parseable JSON
     * @throws IllegalArgumentException if the record has no non-empty "id"
     *         (an empty row key would otherwise fail inside HBase with an
     *         opaque "Row length is 0" error)
     */
    @Override
    public Mutation convertToMutation(String value) {
        final JsonNode root;
        try {
            root = OBJECT_MAPPER.readTree(value);
        } catch (JsonProcessingException e) {
            throw new RuntimeException("Failed to parse sink record as JSON: " + value, e);
        }
        String id = root.path("id").asText();
        if (id.isEmpty()) {
            // path(...) returns "" for a missing or null field; fail fast with context.
            throw new IllegalArgumentException(
                    "Sink record is missing a non-empty 'id' (used as row key): " + value);
        }
        String name = root.path("name").asText();

        Put put = new Put(Bytes.toBytes(id));
        put.addColumn(CF_INFO, QUAL_ID, Bytes.toBytes(id));
        put.addColumn(CF_INFO, QUAL_NAME, Bytes.toBytes(name));
        return put;
    }
}