package dwd_test;

import com.qingyunge.bean.Sku;
import com.qingyunge.common.FlinkPhConfig;
import com.qingyunge.common.KafkaConfig;
import com.qingyunge.common.MysqlConfig;
import com.qingyunge.util.MyClickHouseUtil;
import com.qingyunge.util.MyKafkaUtil;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.w3c.dom.TypeInfo;

/**
 * Flink streaming job: consumes change-log records from a Kafka topic,
 * filters for rows of the {@code sku_info} table in the {@code flinkdata}
 * database, projects the sku id/name out of the {@code data} map via Flink
 * SQL, and sinks the resulting {@code Sku} records into the ClickHouse table
 * {@code sku_test}.
 *
 * <p>NOTE(review): {@code toAppendStream} is deprecated in Flink 1.14+ in
 * favor of {@code toDataStream} — confirm the project's Flink version before
 * migrating.
 */
public class Dwd_Test_table {
    public static void main(String[] args) throws Exception {
        // Single-parallelism streaming environment plus its Table/SQL counterpart.
        StreamExecutionEnvironment env =
                StreamExecutionEnvironment.getExecutionEnvironment().setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Source DDL: maps the raw CDC-style topic onto a table. `ts` is a
        // computed processing-time column; connector options come from the
        // shared Kafka helper.
        String sourceDdl =
                "CREATE TABLE topic_db ( " +
                "  `database` STRING, " +
                "  `table` STRING, " +
                "  `type` STRING, " +
                "  `data` MAP<STRING,STRING>, " +
                "  `before-data` MAP<STRING,STRING>, " +
                "  `ts` AS PROCTIME() " +
                ") " +
                MyKafkaUtil.getKafkaDDL(KafkaConfig.TOPIC_ID, "Dwd_Test_table");
        tableEnv.executeSql(sourceDdl);

        // Keep only sku_info rows from the flinkdata database and pull the
        // id/name entries out of the `data` map.
        String skuProjection =
                "select data['id'] id," +
                "data['name'] name " +
                "from topic_db where `database`='flinkdata' and `table`='sku_info' ";
        Table skuTable = tableEnv.sqlQuery(skuProjection);
        tableEnv.createTemporaryView("sku_info", skuTable);
        skuTable.printSchema();

        // Convert the dynamic table into a typed append-only stream, echo it
        // for debugging, then write each row into ClickHouse.
        DataStream<Sku> skuStream = tableEnv.toAppendStream(skuTable, Sku.class);
        skuStream.print("sku=========");
        skuStream.addSink(MyClickHouseUtil.getSinkFunction("insert into sku_test VALUES(?,?)"));

        env.execute("Dwd_Test_table");
    }
}
