package com.atguigu.bigdata.gmall.realtime.app.dws;

import com.atguigu.bigdata.gmall.realtime.app.BaseSQLApp;
import com.atguigu.bigdata.gmall.realtime.bean.CourseDetail;
import com.atguigu.bigdata.gmall.realtime.common.Constant;
import com.atguigu.bigdata.gmall.realtime.util.FlinkSinkUtil;
import com.atguigu.bigdata.gmall.realtime.util.SQLUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWS job: per-course exam statistics over 5-second tumbling event-time windows.
 *
 * <p>Pipeline: reads the DWD exam-with-question detail topic from Kafka, aggregates
 * per {@code course_id} per window (row count, average score, average exam duration),
 * and writes each window result to the ClickHouse table
 * {@code dws_course_trade_userWindow} as {@link CourseDetail} rows.
 */
public class Dws_04_CourseTradeAndUserWindow extends BaseSQLApp {

    public static void main(String[] args) {
        // init(port, parallelism, ckAndGroupId) — provided by BaseSQLApp.
        new Dws_04_CourseTradeAndUserWindow().init(
                4007,
                2,
                "Dws_04_CourseTradeAndUserWindow"
        );
    }

    @Override
    protected void handle(StreamExecutionEnvironment env, StreamTableEnvironment tEnv) {

        // Kafka source table over the DWD exam detail topic. Business fields all
        // arrive as strings; ts is epoch seconds and drives the event-time column
        // `et` with a 3-second watermark delay for out-of-order records.
        tEnv.executeSql("create table dwd_exam_with_question_detail(" +
                "id string," +
                "paper_title string," +
                "paper_id string," +
                "course_id string," +
                "question_score string," +
                "exam_score string," +
                "duration_sec string," +
                "user_id string," +
                "is_correct string," +
                "create_time string," +
                "ts bigint," +
                "row_op_ts string," +
                "et as to_timestamp_ltz(ts, 0)," +
                "watermark for et as et - interval '3' second "  +
                ")" + SQLUtil.getKafkaSource(Constant.TOPIC_DWD_EXAM_WITH_QUESTIONANDID_DETAIL, "Dws_04_CourseTradeAndUserWindow"));

        // 5-second tumbling window per course_id using the windowing TVF.
        // NOTE(review): userCount is count(course_id), i.e. a row count, NOT a
        // distinct-user count — confirm whether count(distinct user_id) was intended.
        // NOTE(review): both averages are bigint/bigint, so they truncate toward
        // zero (integer division) — confirm this matches the ClickHouse schema.
        Table windowAgg = tEnv.sqlQuery("select " +
                "date_format(window_start, 'yyyy-MM-dd HH:mm:ss') stt," +
                "date_format(window_end, 'yyyy-MM-dd HH:mm:ss') edt," +
                "course_id courseId," +
                "count(course_id) userCount," +
                "sum(cast(exam_score as bigint))/count(course_id) avgScore," +
                "sum(cast(duration_sec as bigint))/count(course_id) avgExamTime," +
                "unix_timestamp() * 1000 ts " +
                "from table ( tumble(table dwd_exam_with_question_detail, descriptor(et), interval '5' second)) " +
                "group by course_id ,window_start, window_end"
        );

        // Retract stream emits (true, row) for inserts and (false, row) for
        // retractions; only the insert side is forwarded to ClickHouse.
        tEnv.toRetractStream(windowAgg, CourseDetail.class)
                .filter(f -> f.f0)
                .map(m -> m.f1)
                .addSink(FlinkSinkUtil.getClickHouseSink("dws_course_trade_userWindow", CourseDetail.class));

        try {
            env.execute();
        } catch (Exception e) {
            // Preserve the cause and add job context for operators reading logs.
            throw new RuntimeException("Dws_04_CourseTradeAndUserWindow job execution failed", e);
        }
    }
}
