package cn.doitedu.rtdw.data_sync;

import cn.doitedu.rtdw.udf.ArraySer;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @Author: deep as the sea
 * @Site: <a href="http://www.51doit.com">多易教育</a>
 * @QQ: 657270652
 * @Date: 2023/4/11
 * @Desc: 学大数据，到多易教育
 * 广告引擎上记录的 广告请求 特征数据 日志  ==>  hbase
 **/
/**
 * Streams ad-request feature logs from Kafka into HBase.
 *
 * <p>Pipeline: Kafka topic {@code ad-request-log} (JSON feature logs recorded by the
 * ad engine) &rarr; serialize the double-array feature vectors to bytes with the
 * {@code ser} UDF &rarr; upsert into the HBase table {@code ad_request_log}, keyed
 * by {@code ad_tracking_id}.
 */
public class SyncJob04_AdRequestFeatures2Hbase {
    public static void main(String[] args) {

        // Streaming environment: exactly-once checkpoints every 2 s, stored on local disk.
        StreamExecutionEnvironment streamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
        streamEnv.enableCheckpointing(2000, CheckpointingMode.EXACTLY_ONCE);
        streamEnv.getCheckpointConfig().setCheckpointStorage("file:/d:/checkpoint");
        streamEnv.setParallelism(1);

        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(streamEnv);

        // Source table: Kafka connector mapped onto the request-feature log topic.
        // Sample record:
        // {"ad_tracking_id":"tk001","ad_id":"ad01","ad_features":[1,0,4,1,1,1,0,4,1],"user_features":[3,0,6,1,1,2,0,1,1],"requestTime":1678678301000}
        String kafkaSourceDdl = String.join("",
                "  CREATE TABLE ad_request_kafka (                                   ",
                "     ad_tracking_id    STRING,                     ",
                "     ad_id             STRING,                     ",
                "     ad_features       ARRAY<double>,              ",
                "     user_features     ARRAY<double>,              ",
                "     requestTime       BIGINT                      ",
                " ) WITH (                                          ",
                "  'connector' = 'kafka',                           ",
                "  'topic' = 'ad-request-log',                      ",
                "  'properties.bootstrap.servers' = 'doitedu:9092', ",
                "  'properties.group.id' = 'testGroup1',            ",
                "  'scan.startup.mode' = 'latest-offset',           ",
                "  'value.format'='json',                           ",
                "  'value.json.fail-on-missing-field'='false',      ",
                "  'value.fields-include' = 'EXCEPT_KEY')           ");
        tableEnv.executeSql(kafkaSourceDdl);

        // Sink table: HBase connector mapped onto the target table ad_request_log,
        // with one column family 'f' holding all payload fields.
        String hbaseSinkDdl = String.join("",
                "CREATE TABLE request_hbase ( ",
                " ad_tracking_id   STRING, ",
                " f ROW<ad_id STRING,ad_features BYTES,user_features BYTES,requestTime BIGINT>, ",
                " PRIMARY KEY (ad_tracking_id) NOT ENFORCED ",
                ") WITH (                             ",
                " 'connector' = 'hbase-2.2',          ",
                " 'table-name' = 'ad_request_log',     ",
                " 'zookeeper.quorum' = 'doitedu:2181' ",
                ")");
        tableEnv.executeSql(hbaseSinkDdl);

        // UDF that serializes a feature array into a byte array for HBase storage.
        tableEnv.createTemporaryFunction("ser", ArraySer.class);

        // Continuous insert: serialize the feature arrays, then pack the payload
        // columns into the ROW for column family 'f'. executeSql submits the job.
        String insertSql = String.join("",
                "INSERT INTO request_hbase ",
                "WITH tmp AS (",
                "SELECT ad_id, ",
                "ad_tracking_id,",
                "ser(ad_features) as ad_features,",
                "ser(user_features) as user_features,",
                "requestTime ",
                "FROM ad_request_kafka )",
                "SELECT ad_tracking_id,ROW(ad_id,ad_features,user_features,requestTime) as f FROM tmp");
        tableEnv.executeSql(insertSql);


    }
}
