package com.qingyunge.app.dwd;

import com.qingyunge.bean.SkuEvalInfo;
import com.qingyunge.bean.SkuInfo;
import com.qingyunge.util.MyKafkaUtil;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWD-layer Flink job: joins the SKU CDC stream with the product-evaluation CDC
 * stream on {@code sku.id = eval.sku_id} and writes the enriched rows to the
 * Kafka topic {@code sku_eval_topic}. The joined stream is also printed to
 * stdout for debugging.
 *
 * <p>NOTE(review): this is a regular (non-windowed) streaming join, so Flink
 * keeps both input sides in state indefinitely. For production, consider
 * configuring {@code table.exec.state.ttl} — confirm with the pipeline owner.
 */
public class DwdSkuJoinProduceEval {

    /** Kafka source topic carrying product-evaluation change events. */
    private static final String PRODUCT_EVAL_TOPIC = "dwd_traffic_eval";
    /** Kafka source topic carrying SKU change events. */
    private static final String SKU_TOPIC = "dwd_traffic_sku";

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env =
                StreamExecutionEnvironment.getExecutionEnvironment().setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        registerSourceTables(tableEnv);

        // Join SKU attributes onto each evaluation event, filtered to the
        // expected source database/table of each CDC stream.
        Table joinedTable = buildJoinedTable(tableEnv);
        tableEnv.createTemporaryView("SkuEvalInfo", joinedTable);

        registerSinkTable(tableEnv);

        // Submit the continuous INSERT job that writes the joined rows to Kafka.
        tableEnv.executeSql("insert into SkuEvalInfoTable select * from SkuEvalInfo");

        // Mirror the joined stream to stdout for debugging; this also gives
        // env.execute() an operator to run.
        DataStream<SkuEvalInfo> skuEvalInfoDS = tableEnv.toAppendStream(joinedTable, SkuEvalInfo.class);
        skuEvalInfoDS.print();

        env.execute("DwdSkuJoinProduceEval");
    }

    /**
     * Declares the two Kafka-backed source tables: the SKU stream (with a
     * processing-time attribute {@code ts}) and the evaluation stream (with an
     * event timestamp {@code ts BIGINT} carried in the payload).
     */
    private static void registerSourceTables(StreamTableEnvironment tableEnv) {
        tableEnv.executeSql("" +
                "CREATE TABLE dwd_traffic_sku ( " +
                "  `database` STRING, " +
                "  `table` STRING, " +
                "  `type` STRING, " +
                "  `data` MAP<STRING,STRING>, " +
                "  `before-data` MAP<STRING,STRING>, " +
                "  `ts` AS PROCTIME() " +
                ") " +
                MyKafkaUtil.getKafkaDDL(SKU_TOPIC, "dwd_traffic_sku"));
        tableEnv.executeSql("" +
                "CREATE TABLE dwd_traffic_eval ( " +
                "  `database` STRING, " +
                "  `table` STRING, " +
                "  `type` STRING, " +
                "  `data` MAP<STRING,STRING>, " +
                "  `before-data` MAP<STRING,STRING>, " +
                "  `ts` BIGINT " +
                ") " +
                MyKafkaUtil.getKafkaDDL(PRODUCT_EVAL_TOPIC, "dwd_traffic_eval"));
    }

    /**
     * Builds the SKU ⋈ evaluation table: one output row per evaluation event,
     * enriched with the matching SKU's attributes. {@code ts} is taken from the
     * evaluation side (the BIGINT event timestamp).
     */
    private static Table buildJoinedTable(StreamTableEnvironment tableEnv) {
        return tableEnv.sqlQuery("" +
                "SELECT " +
                "  e.data['id'] AS id, " +
                "  e.data['sku_id'] AS skuId, " +
                "  s.data['name'] AS name, " +
                "  s.data['price'] AS price, " +
                "  s.data['img_url'] AS imgUrl, " +
                "  s.data['eval_type'] AS evalType, " +
                "  s.data['eval_num'] AS evalNum, " +
                "  s.data['por_view'] AS porView, " +
                "  e.data['evaluate'] AS evaluate, " +
                "  e.data['proconf'] AS proconf, " +
                "  e.data['date'] AS `date`, " +
                "  e.data['location'] AS location, " +
                "  e.ts AS ts " +
                "  FROM dwd_traffic_sku AS s " +
                "JOIN dwd_traffic_eval AS e ON s.data['id'] = e.data['sku_id'] " +
                "WHERE s.`database` = 'flinkdata' AND s.`table` = 'sku_info' AND e.`database` = 'flinkdata' AND e.`table` = 'product_eval' ");
    }

    /**
     * Declares the Kafka sink table. Column order must match the projection of
     * {@link #buildJoinedTable}, since the job inserts via {@code SELECT *}.
     */
    private static void registerSinkTable(StreamTableEnvironment tableEnv) {
        tableEnv.executeSql("create table SkuEvalInfoTable( " +
                "    `id` string,  " +
                "     `skuId` string, " +
                "    `name` string,  " +
                "    `price` string,  " +
                "    `imgUrl` string,  " +
                "    `evalType` string,  " +
                "    `evalNum` string,  " +
                "    `porView` string,  " +
                "    `evaluate` string,  " +
                "    `proconf` string,  " +
                "    `date` string,  " +
                "    `location` string, " +
                "     `ts` BIGINT " +
                ") " + MyKafkaUtil.getKafkaSinkDDL("sku_eval_topic"));
    }
}
