package com.atguigu.bigdata.gmall.realtime.app.dws;/**
 * @Classname Dws_03_DwsEvalCourseWindow_Cache_Async
 * @Description DWS job: aggregates course-evaluation events into 5-second tumbling windows and writes the results to ClickHouse
 * @Date 2022/10/25 19:46
 * @Created by 86152
 */

import com.atguigu.bigdata.gmall.realtime.app.BaseAppV1;
import com.atguigu.bigdata.gmall.realtime.app.BaseSQLApp;
import com.atguigu.bigdata.gmall.realtime.bean.EvalBean;
import com.atguigu.bigdata.gmall.realtime.common.Constant;
import com.atguigu.bigdata.gmall.realtime.util.FlinkSinkUtil;
import com.atguigu.bigdata.gmall.realtime.util.SQLUtil;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;


/**
 * @author: Lime
 * @description Windowed aggregation of course evaluations (unique users, average stars, positive-review count) per course, sunk to ClickHouse
 * @date: 2022/10/25 19:46
 */
public class Dws_03_DwsEvalCourseWindow_Cache_Async extends BaseSQLApp {

    public static void main(String[] args) {
        new Dws_03_DwsEvalCourseWindow_Cache_Async().init(
                3010,
                2,
                "Dws_03_DwsEvalCourseWindow_Cache_Async"
        );
    }

    /**
     * Builds the streaming pipeline: reads course-evaluation events from the
     * DWD Kafka topic, aggregates them per course over 5-second tumbling
     * event-time windows, and sinks the window results to ClickHouse.
     *
     * @param env  the Flink streaming execution environment (executed at the end)
     * @param tEnv the table environment used to run the SQL pipeline
     */
    @Override
    protected void handle(StreamExecutionEnvironment env,
                          StreamTableEnvironment tEnv) {
        // 1. Declare a dynamic table over the DWD course-evaluation Kafka topic.
        //    The TableResult of a DDL statement carries no data, so it is not kept.
        tEnv.executeSql("create table dwd_Eval_Course(\n" +
                " id bigint,\n" +
                " course_id bigint,\n" +
                " course_name string,\n" +
                " user_id string,\n" +
                " review_stars string,\n" +
                " ts bigint,\n" +
                // to_timestamp_ltz precision argument: 0 = seconds, 3 = milliseconds
                " et as to_timestamp_ltz(ts, 0),\n" +
                // watermark: tolerate events arriving up to 3 seconds late
                " watermark for et as et - interval '3' second\n" +
                ")" + SQLUtil.getKafkaSource(Constant.TOPIC_DWD_EVAL_COURSE, "Dws_03_DwsEvalCourseWindow_Cache_Async"));

        // 2. Per-course aggregation over 5-second tumbling windows.
        //    uv counts DISTINCT users: count(user_id) would count every
        //    evaluation row and over-count users who rate the same course
        //    more than once in a window.
        Table result = tEnv.sqlQuery("select\n" +
                " date_format(window_start, 'yyyy-MM-dd HH:mm:ss') stt,\n" +
                " date_format(window_end, 'yyyy-MM-dd HH:mm:ss') edt,\n" +
                " course_id,\n" +
                " course_name,\n" +
                " count(distinct user_id) uv,\n" +
                " avg(cast(review_stars as bigint)) avg_starts,\n" +
                " sum(if(cast(review_stars as bigint) > 4,1,0)) hao_ping,\n" +
                " unix_timestamp() * 1000 ts " +
                " from table( tumble( table dwd_Eval_Course, descriptor(et), interval '5' second ) )\n" +
                "group by course_id,course_name, window_start, window_end");

        // 3. Convert the result table to a retract stream, keep only the
        //    "add" messages (f0 == true), unwrap the bean, and write it to
        //    the ClickHouse table dws_eval_course_window.
        tEnv
                .toRetractStream(result, EvalBean.class)
                .filter(t -> t.f0)
                .map(t -> t.f1)
                .addSink(FlinkSinkUtil.getClickHouseSink("dws_eval_course_window", EvalBean.class));

        try {
            env.execute();
        } catch (Exception e) {
            // Wrap as unchecked but keep the original cause for diagnosis.
            throw new RuntimeException(e);
        }
    }
}
