package com.atguigu.gmall.app.dws.examination;

import com.atguigu.gmall.bean.DwsExaminationScoreBean;
import com.atguigu.gmall.bean.ExaminationBeanTwo;
import com.atguigu.gmall.utils.ClickHouseUtil;
import com.atguigu.gmall.utils.SQLUtil;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWS-layer Flink job: aggregates exam scores into 10-second tumbling windows.
 *
 * <p>Pipeline:
 * <ol>
 *   <li>Reads the Maxwell-style CDC stream from Kafka topic {@code topic_db}
 *       (fields: database/table/type/data map/ts) with a 2-second watermark on
 *       the event time derived from {@code ts}.</li>
 *   <li>Filters {@code insert} rows of the {@code test_exam} table and buckets
 *       each score into the ranges [0,60), [60,70), [70,80), [80,90), [90,100],
 *       grouped by {@code paper_id} per 10-second tumble window.</li>
 *   <li>Writes each window result row into the ClickHouse table
 *       {@code dws_examination_score}.</li>
 * </ol>
 */
public class DwsExaminationScore {
    // main declares `throws Exception` instead of swallowing failures with
    // printStackTrace(): a failed env.execute() must terminate the job with a
    // non-zero status so the scheduler/supervisor can detect and restart it.
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment streamTableEnvironment = StreamTableEnvironment.create(env);

        String topicName = "topic_db";
        // NOTE(review): was "DwdExamination", copy-pasted from the DWD job.
        // Sharing a consumer group with another job makes the two jobs steal
        // partitions/offsets from each other; each job needs its own group id.
        String groupID = "DwsExaminationScore";

        // Source: CDC envelope table over Kafka. `rt` is the event time
        // (epoch seconds -> TIMESTAMP_LTZ(3)) with a 2s out-of-orderness bound.
        streamTableEnvironment.executeSql(" create table topic_db( " +
                " `database` string, " +
                " `table` string, " +
                " `type` string, " +
                " `data` map<string,string>, " +
                " `ts` bigint, " +
                "  rt AS TO_TIMESTAMP_LTZ(ts*1000, 3), " +
                "  WATERMARK FOR  rt AS rt - INTERVAL '2' SECOND " +
                " ) "
                + SQLUtil.getKafkaSource(topicName, groupID));

        // Windowed aggregation: per paper_id, count scores falling into each
        // grade bucket within a 10-second tumble window. `ts` is the wall-clock
        // emit time in ms used as the ClickHouse version/ordering column.
        Table table = streamTableEnvironment.sqlQuery(
                " select " +
                        "  DATE_FORMAT(TUMBLE_START(rt, interval '10' second), 'yyyy-MM-dd HH:mm:ss') stt, " +
                        "  DATE_FORMAT(TUMBLE_END(rt, interval '10' second), 'yyyy-MM-dd HH:mm:ss')  edt, " +
                        " paper_id," +
                        " sum(if(score < 60,1,0)) `s_0_59`, " +
                        " sum(if(score >= 60 and score < 70,1,0)) `s_60_69`, " +
                        " sum(if(score >= 70 and score < 80,1,0)) `s_70_79`, " +
                        " sum(if(score >= 80 and score < 90,1,0)) `s_80_89`, " +
                        " sum(if(score >= 90 and score <= 100,1,0)) `s_90_100`, " +
                        " unix_timestamp()*1000 ts " +
                        " from( " +
                        " select " +
                        " data['id'] id, " +
                        " data['paper_id'] paper_id, " +
                        " data['user_id'] user_id, " +
                        " data['score'] score, " +
                        " data['duration_sec'] duration_sec," +
                        " rt " +
                        " from topic_db " +
                        " where `table` = 'test_exam' " +
                        " and `type` = 'insert') t " +
                        " group by paper_id, " +
                        " TUMBLE(rt, INTERVAL '10' SECOND) ");

        // TUMBLE group-window aggregation emits one final row per window, so the
        // result is append-only and toAppendStream is safe here.
        DataStream<DwsExaminationScoreBean> scoreStream =
                streamTableEnvironment.toAppendStream(table, DwsExaminationScoreBean.class);
        scoreStream.print();

        // 9 placeholders: stt, edt, paper_id, s_0_59, s_60_69, s_70_79,
        // s_80_89, s_90_100, ts — must match dws_examination_score's columns.
        String sql = "insert into dws_examination_score values(?,?,?,?,?,?,?,?,?)";
        scoreStream.addSink(ClickHouseUtil.<DwsExaminationScoreBean>getJdbcSink(sql));

        env.execute();
    }

}
