package com.allen.flink.dbus.fullpuller;

import com.allen.flink.dbus.config.GlobalConfig;
import com.allen.flink.dbus.utils.JdbcUtil;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.hadoop.mapreduce.HadoopOutputFormat;
import org.apache.flink.api.java.io.jdbc.JDBCInputFormat;
import org.apache.flink.api.java.io.jdbc.split.NumericBetweenParametersProvider;
import org.apache.flink.api.java.operators.DataSource;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.typeutils.RowTypeInfo;
import org.apache.flink.configuration.ConfigConstants;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.types.Row;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.mapreduce.TableOutputFormat;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

/**
 * Purpose: full data pull (initial load) module —
 * a Flink-based analogue of Sqoop,
 * built on flink-jdbc
 *
 * @date: 2020-03-26 14:49
 * @author: Allen
 * @version: 0.0.4-snapshot
 * @Email: allenZyhang@163.com
 * @since: JDK 1.8
 **/
public class FullPullerApp {
    
    /** Whether to split the source query into parallel numeric ranges on {@link #SPLIT_FIELD}. */
    public final static boolean IS_PARALLELISM = true;
    
    /** Numeric column used to partition the goods table into parallel splits. */
    public final static String SPLIT_FIELD = "goodsId";
    
    /** Row layout of the goods table: goodsId, goodsName, sellingPrice, goodsStock, appraiseNum. */
    public final static RowTypeInfo ROW_TYPE_INFO = new RowTypeInfo(BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.BIG_DEC_TYPE_INFO,
        BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.INT_TYPE_INFO);
    
    /**
     * Entry point: reads the MySQL {@code goods} table (optionally in parallel
     * range splits) and writes every row into the HBase table
     * {@code learing_flink:goods}.
     *
     * @param args unused
     * @throws Exception if the Flink job or the boundary query fails
     */
    public static void main(String[] args) throws Exception {
        // Batch execution environment.
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        
        // Configure the JDBC source for the goods table.
        // NOTE(review): USER_MAME looks like a typo for USER_NAME, but the constant
        // is declared in GlobalConfig outside this file — fix it there.
        JDBCInputFormat.JDBCInputFormatBuilder builder = JDBCInputFormat.buildJDBCInputFormat()
            .setDrivername(GlobalConfig.DRIVER_CLASS)
            .setDBUrl(GlobalConfig.DB_URL)
            .setUsername(GlobalConfig.USER_MAME)
            .setPassword(GlobalConfig.PASSWORD)
            .setQuery("select * from goods")
            .setRowTypeInfo(ROW_TYPE_INFO);
        
        if (IS_PARALLELISM) { // split the read into parallel ranges?
            
            // Rows fetched per split; the provider derives the split count from it.
            int fetchSize = 2;
            
            Boundary boundary = boundaryQuery(SPLIT_FIELD);
            
            // Replace the full-table query with a parameterized range query; the
            // provider fills the `between ? and ?` placeholders per split.
            builder.setQuery("select * from goods where " + SPLIT_FIELD + " between ? and ?").setParametersProvider(
                new NumericBetweenParametersProvider(fetchSize, boundary.min, boundary.max));
        }
        
        // Read the MySQL rows.
        DataSource<Row> source = env.createInput(builder.finish());
        
        // Convert rows into HBase mutations keyed by goodsId.
        DataSet<Tuple2<Text, Mutation>> hbaseResult = convertMysqlToHBase(source);
        
        // Configure the HBase sink.
        org.apache.hadoop.conf.Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", GlobalConfig.HBASE_ZOOKEEPER_QUORUM);
        conf.set("hbase.zookeeper.property.clientPort", GlobalConfig.HBASE_ZOOKEEPER_PROPERTY_CLIENT_PORT);
        conf.set("zookeeper.znode.parent", GlobalConfig.ZOOKEEPER_ZNODE_PARENT);
        conf.set(TableOutputFormat.OUTPUT_TABLE, "learing_flink:goods");
        // TableOutputFormat extends FileOutputFormat, which insists on an output dir.
        conf.set("mapreduce.output.fileoutputformat.outputdir", "/tmp");
        
        Job job = Job.getInstance(conf);
        
        hbaseResult.output(new HadoopOutputFormat<>(new TableOutputFormat<>(), job));
        
        env.execute("FullPullerApp");
    }
    
    /**
     * Queries {@code min}/{@code max} of the split column from the goods table;
     * the result bounds the parallel split ranges.
     *
     * @param splitField numeric column to aggregate; must be a trusted identifier —
     *                   it is concatenated directly into the SQL text
     * @return the inclusive [min, max] boundary of {@code splitField}
     * @throws IllegalStateException if the query fails or returns no row; the
     *                               previous silent fallback to [0, 0] would make
     *                               the range query pull no data
     */
    private static Boundary boundaryQuery(final String splitField) {
        String sql = "select min(" + splitField + "), max(" + splitField + ") from goods";
        try (Connection conn = JdbcUtil.getConn();
             PreparedStatement stmt = JdbcUtil.getPreparedStatement(conn, sql);
             // BUGFIX: executeQuery(String) must not be called on a PreparedStatement
             // (JDBC spec; drivers throw SQLException) — use the no-arg overload.
             ResultSet rs = stmt.executeQuery()) {
            if (rs.next()) {
                return Boundary.of(rs.getInt(1), rs.getInt(2));
            }
            throw new IllegalStateException("Boundary query returned no rows: " + sql);
        } catch (SQLException e) {
            // Fail fast instead of swallowing the error and returning [0, 0].
            throw new IllegalStateException("Boundary query failed: " + sql, e);
        }
    }
    
    /**
     * Maps each JDBC {@link Row} to an HBase {@link Put} keyed by goodsId
     * (field 0), writing the remaining columns into column family {@code F}.
     *
     * @param source the goods rows read from MySQL
     * @return tuples of (row key, mutation) for the Hadoop HBase output format
     */
    private static DataSet<Tuple2<Text, Mutation>> convertMysqlToHBase(final DataSource<Row> source) {
        
        return source.map(new RichMapFunction<Row, Tuple2<Text, Mutation>>() {
            
            // Reused per record to avoid allocating a tuple for every row.
            private transient Tuple2<Text, Mutation> resultTp;
            
            // Column family that holds all goods columns.
            private final byte[] cf = "F".getBytes(ConfigConstants.DEFAULT_CHARSET);
            
            @Override
            public void open(final Configuration parameters) throws Exception {
                resultTp = new Tuple2<>();
            }
            
            @Override
            public Tuple2<Text, Mutation> map(final Row row) throws Exception {
                String rowKey = row.getField(0).toString();
                resultTp.f0 = new Text(rowKey);
                Put put = new Put(rowKey.getBytes(ConfigConstants.DEFAULT_CHARSET));
                // BUGFIX: all nullable columns are now guarded; previously only
                // goodsName was, so a NULL in fields 2-4 threw an NPE.
                addIfPresent(put, "goodsName", row.getField(1));
                addIfPresent(put, "sellingPrice", row.getField(2));
                addIfPresent(put, "goodsStock", row.getField(3));
                addIfPresent(put, "appraiseNum", row.getField(4));
                resultTp.f1 = put;
                return resultTp;
            }
            
            // Adds the column only when the value is non-null.
            private void addIfPresent(final Put put, final String qualifier, final Object value) {
                if (null != value) {
                    put.addColumn(cf, Bytes.toBytes(qualifier), Bytes.toBytes(value.toString()));
                }
            }
        });
        
    }
    
    /** Immutable inclusive [min, max] range for the split parameter provider. */
    public static class Boundary {
        private final int min;
        private final int max;
        
        public Boundary(final int min, final int max) {
            this.min = min;
            this.max = max;
        }
        
        /** Static factory mirroring the constructor. */
        public static Boundary of(int min, int max) {
            return new Boundary(min, max);
        }
    }
}
