package org.myfram.flink.flinkonjar.common.connector.hbase.source;

import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.connector.hbase.util.HBaseConfigurationUtil;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import org.myfram.flink.flinkonjar.common.config.HBaseConfig;
import org.myfram.flink.flinkonjar.common.connector.hbase.HBaseRowConvert;
import org.myfram.flink.flinkonjar.common.properties.HBaseProperties;
import org.myfram.flink.flinkonjar.common.util.YamlUtils;

import java.util.List;

/**
 * A Flink {@link SourceFunction} that performs a bounded scan over an HBase table and
 * emits each row converted to {@code T} via the supplied {@link HBaseRowConvert}.
 *
 * <p>The Hadoop {@link Configuration} is not {@link java.io.Serializable}, so it is
 * serialized to bytes in the constructor (on the client) and deserialized again in
 * {@link #run} (on the task manager) using {@link HBaseConfigurationUtil}.
 */
public class HBaseSource<T> implements SourceFunction<T> {

    /** Fully qualified HBase table name to scan. */
    private final String tableName;
    /** Converts a raw HBase {@link Result} row into the output type {@code T}. */
    private final HBaseRowConvert<T> hBaseRowConvert;
    /** Client-side HBase configuration, serialized so it can ship with the job graph. */
    private final byte[] serializedConfig;
    /** Cooperative-cancellation flag; volatile because cancel() runs on another thread. */
    private volatile boolean isRunning = true;

    private final List<String> columnFamilies;
    private final List<Tuple2<String, String>> columns;
    /** Scanner caching: number of rows fetched per RPC. */
    private final int cacheSize;
    /** Inclusive lower bound of the cell-timestamp range (ms). */
    private final long timeRangeStart;
    /** Exclusive upper bound of the cell-timestamp range (ms). */
    private final long timeRangeEnd;

    /**
     * @param tableName       HBase table to scan
     * @param hBaseRowConvert row-to-{@code T} converter
     * @param hbaseConfig     HBase client configuration (serialized for distribution)
     * @param columnFamilies  families to scan in full
     * @param columns         (family, qualifier) pairs to scan individually
     * @param cacheSize       scanner caching (rows per RPC)
     * @param timeRangeStart  minimum cell timestamp, inclusive
     * @param timeRangeEnd    maximum cell timestamp, exclusive
     */
    public HBaseSource(String tableName, HBaseRowConvert<T> hBaseRowConvert, Configuration hbaseConfig, List<String> columnFamilies, List<Tuple2<String, String>> columns, int cacheSize, long timeRangeStart, long timeRangeEnd) {
        this.tableName = tableName;
        this.hBaseRowConvert = hBaseRowConvert;
        this.serializedConfig = HBaseConfigurationUtil.serializeConfiguration(hbaseConfig);
        this.columnFamilies = columnFamilies;
        this.columns = columns;
        this.cacheSize = cacheSize;
        this.timeRangeStart = timeRangeStart;
        this.timeRangeEnd = timeRangeEnd;
    }

    @Override
    public void run(SourceContext<T> ctx) throws Exception {
        Scan scan = new Scan();
        scan.setCaching(cacheSize);
        scan.setTimeRange(timeRangeStart, timeRangeEnd);
        for (Tuple2<String, String> column : columns) {
            scan.addColumn(Bytes.toBytes(column.f0), Bytes.toBytes(column.f1));
        }
        for (String family : columnFamilies) {
            scan.addFamily(Bytes.toBytes(family));
        }
        // Restore the configuration the caller handed to the constructor. Previously this
        // re-read "flink.hbase" from YAML on the worker, which silently discarded the
        // serialized config and left `serializedConfig` unused.
        Configuration hbaseConfig =
                HBaseConfigurationUtil.deserializeConfiguration(
                        serializedConfig, HBaseConfiguration.create());
        try (Connection connection = ConnectionFactory.createConnection(hbaseConfig);
             Table table = connection.getTable(TableName.valueOf(tableName));
             ResultScanner scanner = table.getScanner(scan)) {

            for (Result result : scanner) {
                if (!isRunning) {
                    break;
                }
                T output = hBaseRowConvert.convertToResult(result);
                // Emit under the checkpoint lock as required by the SourceFunction contract,
                // so collection and checkpointing are atomic with respect to each other.
                synchronized (ctx.getCheckpointLock()) {
                    ctx.collect(output);
                }
            }
        }
    }

    /** Requests cooperative shutdown; the scan loop checks {@link #isRunning} per row. */
    @Override
    public void cancel() {
        isRunning = false;
    }
}
