package com.bw.gmall.app.dws;

import com.bw.gmall.app.fure.SplitFunction;
import com.bw.gmall.bean.DiYiZgiErBao;
import com.bw.gmall.utils.MyClickHouseUtil;
import com.bw.gmall.utils.MyKafkaUtil;
import com.bw.gmall.utils.MysqlUtil;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWS job: enriches traffic action-log events with sku dimension data
 * (left join on sku_id) and writes the result to ClickHouse.
 *
 * Sources (Kafka): dwd_traffic_action_log, dwd_sku_info_add.
 * Sink: ClickHouse table dwd_di_yi_zhou_er.
 */
public class DwsDiErZhiBiao {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Single parallelism keeps event ordering simple for this job.
        env.setParallelism(1);

        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Kafka source: action log with event time derived from ts (millis)
        // and a 2-second out-of-orderness watermark.
        tableEnv.executeSql("create table action_log( " +
                "    `common` map<string,string>, " +
                "    `action` map<string,string>, " +
                "    `page` map<string,string>, " +
                "    `ts` bigint, " +
                "    `rt` as TO_TIMESTAMP(FROM_UNIXTIME(ts/1000)), " +
                "    WATERMARK FOR rt AS rt - INTERVAL '2' SECOND " +
                " ) " + MyKafkaUtil.getKafkaDDL("dwd_traffic_action_log", "dwd_traffic_action_log_1565"));

        // Kafka source: sku dimension records (CDC-style payload in `after`).
        tableEnv.executeSql("create table sku_info( " +
                "`after` map<string,string>)" + MyKafkaUtil
                .getKafkaDDL("dwd_sku_info_add", "dwd_sku_info_add_1565"));

        // Keep only events that reference an item (sku).
        Table table = tableEnv.sqlQuery("select " +
                "`page`['item'] as sku_id," +
                "`common`['uid'] as uid," +
                "`common`['is_new'] as is_new," +
                "`common`['ch'] as ch," +
                "`action`['action_id'] as action_id," +
                "rt " +
                "from action_log " +
                "where `page`['item'] is not null");

        // Project the sku dimension fields needed for the join.
        Table sku_info = tableEnv.sqlQuery("select " +
                "`after`['id'] as id," +
                "`after`['sku_name'] as sku_name " +
                "from sku_info");

        tableEnv.createTemporaryView("zho", table);
        tableEnv.createTemporaryView("sname", sku_info);

        // Left join actions with sku names; missing dimension values default
        // to '0'. sku_id may be a comma-separated list — take the first entry.
        // NOTE: tt is already produced as 'yyyy-MM-dd HH:mm:ss' by the inner
        // DATE_FORMAT, so it is passed through directly (the former
        // TO_TIMESTAMP/DATE_FORMAT round-trip was a no-op).
        Table xiadan = tableEnv.sqlQuery("" +
                "select " +
                "a1.uid," +
                "a1.sku_id," +
                "a1.is_new," +
                "COALESCE(a1.ch,'0') as ch, " +
                "COALESCE(a1.action_id,'0') as action_id, " +
                "COALESCE(a2.sku_name,'0') as sku_name, " +
                "a1.tt " +
                "from(" +
                "select " +
                "SPLIT_INDEX(sku_id, ',', 0) as sku_id," +
                "is_new," +
                "uid," +
                "ch," +
                "action_id," +
                "DATE_FORMAT(rt, 'yyyy-MM-dd HH:mm:ss') as tt " +
                "from zho )a1 " +
                "left join sname a2 on a1.sku_id=a2.id");

        // A non-windowed left join is an updating result, so the stream
        // carries (flag, row) pairs: flag=true for inserts, false for
        // retractions of previously emitted rows.
        DataStream<Tuple2<Boolean, DiYiZgiErBao>> diYiZgiErBaoDataStream =
                tableEnv.toRetractStream(xiadan, DiYiZgiErBao.class);
        diYiZgiErBaoDataStream.print();

        // BUGFIX: only insert records (f0 == true) may be written to
        // ClickHouse. Previously retraction records were also mapped and
        // inserted, duplicating every updated row (once with sku_name='0',
        // once enriched).
        diYiZgiErBaoDataStream
                .filter(t -> t.f0)
                .map(t -> t.f1)
                .addSink(MyClickHouseUtil
                        .getSinkFunction("insert into table dwd_di_yi_zhou_er values(?,?,?,?,?,?,?)"));

        env.execute("DwsDiErZhiBiao");
    }
}
