package org.myfram.flink.flinkonjar.application.job;

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.myfram.flink.flinkonjar.common.connector.hbase.source.HBaseSourceBuilder;
import org.myfram.flink.flinkonjar.common.properties.HBaseProperties;
import org.myfram.flink.flinkonjar.common.util.YamlUtils;
import org.myfram.flink.flinkonjar.domain.entity.HBaseRowConvertTable;
import org.myfram.flink.flinkonjar.domain.entity.HBaseSourceUserTable;

/**
 * Flink job that reads rows from an HBase table via a custom
 * {@link HBaseSourceBuilder} source and prints them to stdout.
 *
 * <p>HBase connection settings are loaded from the {@code flink.hbase}
 * section of the application YAML configuration.
 */
public class QueryHBaseSinkJob extends BaseFlinkJob {

    /** HBase table to scan. */
    private static final String TABLE_NAME = "test_table";
    /** Column family containing the queried columns. */
    private static final String COLUMN_FAMILY = "info";
    /** Qualifier of the column to read from {@link #COLUMN_FAMILY}. */
    private static final String COLUMN_NAME = "name";
    /** Operator name shown in the Flink UI for this source. */
    private static final String SOURCE_NAME = "hbaseSource";
    /** Scanner caching size: rows fetched per RPC round-trip to HBase. */
    private static final int SCAN_CACHE_SIZE = 500;

    /**
     * Builds the HBase source from YAML-backed properties, attaches it to the
     * environment, and prints every emitted {@link HBaseSourceUserTable} row.
     *
     * @param env the streaming execution environment provided by the base job
     * @throws Exception if configuration loading or source construction fails
     */
    @Override
    protected void process(StreamExecutionEnvironment env) throws Exception {
        HBaseProperties hBaseProperties = YamlUtils.getObject("flink.hbase", HBaseProperties.class);
        // NOTE(review): SourceFunction is deprecated in recent Flink releases in
        // favor of the unified Source API — consider migrating the builder.
        SourceFunction<HBaseSourceUserTable> hbaseSource = new HBaseSourceBuilder<HBaseSourceUserTable>(hBaseProperties)
                .setTableName(TABLE_NAME)
                .addColumnFamily(COLUMN_FAMILY)
                .addColumn(COLUMN_FAMILY, COLUMN_NAME)
                .setCacheSize(SCAN_CACHE_SIZE)
                .setHbaseRowConvert(new HBaseRowConvertTable())
                .build();
        // Explicit TypeInformation is supplied because the generic source type
        // cannot be inferred from the SourceFunction alone.
        DataStreamSource<HBaseSourceUserTable> source =
                env.addSource(hbaseSource, SOURCE_NAME, TypeInformation.of(HBaseSourceUserTable.class));
        source.print();
    }
}
