package sink;

import beans.TableProcess;
import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.pool.DruidPooledConnection;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import utils.DruidDSUtil;

import java.sql.PreparedStatement;

/**
 * Flink sink that writes each incoming (data, config) pair into a Phoenix table.
 *
 * <p>The {@link TableProcess} element carries the target table name and its
 * comma-separated column list; the {@link JSONObject} element carries the row
 * values keyed by column name. Each record is written with a dynamically built
 * {@code UPSERT} statement and committed immediately.
 */
public class PhoenixSink extends RichSinkFunction<Tuple2<JSONObject, TableProcess>> {
    // Druid pool handing out Phoenix connections; initialized once per task slot.
    DruidDataSource dataSource;

    @Override
    public void open(Configuration parameters) throws Exception {
        // 1. Acquire the shared Phoenix connection pool.
        dataSource = DruidDSUtil.getDataSource();
    }

    /**
     * Writes one record to Phoenix.
     *
     * @param value   f0 = row data (values looked up by column name),
     *                f1 = sink config (target table + column list)
     * @param context Flink sink context (unused)
     * @throws Exception on any JDBC failure; the pooled connection and the
     *                   prepared statement are released in all cases
     */
    @Override
    public void invoke(Tuple2<JSONObject, TableProcess> value, Context context) throws Exception {
        JSONObject data = value.f0;
        TableProcess tp = value.f1;

        String sinkColumns = tp.getSinkColumns();

        // 2. Build the SQL: upsert into <table>(<col1,col2,...>)values(?,?,...)
        //    Every comma-separated column name is replaced by one '?' placeholder.
        StringBuilder sql = new StringBuilder();
        sql.append("upsert into ").append(tp.getSinkTable()).append("(")
                .append(sinkColumns).append(")values(")
                .append(sinkColumns.replaceAll("[^,]+", "?")).append(")");

        // 3. try-with-resources guarantees the statement is closed and the pooled
        //    connection is returned even when execution fails. (The original only
        //    closed them on the happy path, leaking a pooled connection per error.)
        try (DruidPooledConnection conn = dataSource.getConnection();
             PreparedStatement ps = conn.prepareStatement(sql.toString())) {
            // 3.1 Bind values: JDBC parameters are 1-based while the array is 0-based.
            String[] columns = sinkColumns.split(",");
            for (int i = 0; i < columns.length; i++) {
                ps.setString(i + 1, data.getString(columns[i]));
            }

            // 4. Execute the upsert.
            ps.execute();

            // 5. Commit explicitly — the pooled connection may have autocommit off.
            conn.commit();
        }
    }
}
