package com.atguigu.bigdata.gmall.realtime.app.dws;

import com.atguigu.bigdata.gmall.realtime.app.BaseSQLApp;
import com.atguigu.bigdata.gmall.realtime.bean.ExamDetail;
import com.atguigu.bigdata.gmall.realtime.common.Constant;
import com.atguigu.bigdata.gmall.realtime.util.FlinkSinkUtil;
import com.atguigu.bigdata.gmall.realtime.util.SQLUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWS job: per-paper exam statistics over 5-second tumbling event-time windows.
 *
 * <p>Reads the DWD exam-detail topic from Kafka, then for each
 * (paper_id, paper_title, window) emits the attempt count, the average score
 * and the average time spent. Results are currently printed to stdout; the
 * ClickHouse sink ({@code dws_exam_userSum_window}) is still disabled below.
 */
public class Dws_04_TestPaperAndUserWindow extends BaseSQLApp {
    public static void main(String[] args) {
        // port 4005, parallelism 2, checkpoint/ck-group identifier
        new Dws_04_TestPaperAndUserWindow().init(
                4005,
                2,
                "Dws_04_TestPaperAndUser"
        );
    }

    /**
     * Builds the Kafka-backed source table and the windowed aggregation.
     *
     * @param env  stream environment (execution is driven by {@link BaseSQLApp})
     * @param tEnv table environment used for all SQL statements
     */
    @Override
    protected void handle(StreamExecutionEnvironment env, StreamTableEnvironment tEnv) {
        // Source table over the DWD exam-detail topic. `et` is the event-time
        // attribute derived from `ts` (epoch seconds — precision 0), with a
        // 3-second out-of-orderness watermark.
        tEnv.executeSql("create table dwd_exam_detail(" +
                "id string," +
                "paper_title string," +
                "paper_id string," +
                "course_id string," +
                "score string," +
                "duration_sec string," +
                "create_time string," +
                "ts bigint," +
                "et as to_timestamp_ltz(ts,0)," +
                "watermark for et as et - interval '3' second " +
                ")" + SQLUtil.getKafkaSource(Constant.TOPIC_DWD_EXAM_DETAIL, "dwd_exam_detail"));

        // 5-second tumbling window per paper.
        // FIX: avgScore/avgUseTime previously used sum(cast(... as bigint))/count(...),
        // which is BIGINT/BIGINT integer division in Flink SQL and silently
        // truncates the fractional part of the averages. Averaging over a
        // DECIMAL cast preserves it. Verify the ExamDetail bean's field types
        // accept a decimal before re-enabling the sink.
        // NOTE(review): count(paper_id) counts exam rows, not distinct users —
        // the source table exposes no user_id to deduplicate on; confirm one
        // row per user per paper upstream, or add user_id to the DWD schema.
        Table table = tEnv.sqlQuery("select " +
                "date_format(window_start, 'yyyy-MM-dd HH:mm:ss') stt, " +
                "date_format(window_end, 'yyyy-MM-dd HH:mm:ss') edt, " +
                "paper_id paperId, " +
                "paper_title paperTitle, " +
                "count(paper_id) userCount," +
                "avg(cast(score as decimal(16,2))) avgScore," +
                "avg(cast(duration_sec as decimal(16,2))) avgUseTime," +
                "unix_timestamp() * 1000 ts " +
                "from table( tumble(table dwd_exam_detail,descriptor(et),interval '5' second )) " +
                "group by paper_id,paper_title,window_start,window_end ");

        // Debug output only. TODO(review): replace print() with the ClickHouse
        // sink below once the ExamDetail bean mapping has been verified.
        table.execute().print();
//        tEnv.toRetractStream(table, ExamDetail.class)
//                .filter(t -> t.f0)
//                .map(t -> t.f1)
//                .addSink(FlinkSinkUtil.getClickHouseSink("dws_exam_userSum_window", ExamDetail.class));
    }
}
