package com.atguigu.gmall.app.dws.log;

import com.atguigu.gmall.bean.TableBean;
import com.atguigu.gmall.bean.TableTwoBean;
import com.atguigu.gmall.utils.ClickHouseUtil;
import com.atguigu.gmall.utils.SQLUtil;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.util.Collector;

import java.time.Duration;

/**
 * DWS-layer Flink job: time-sharing traffic statistics, split by the {@code is_new}
 * visitor flag.
 *
 * <p>Pipeline: reads three DWD Kafka topics (page log, unique-visitor detail,
 * user-jump detail) via the Table API, aggregates page-log metrics in 5-second
 * tumbling event-time windows, left-joins the per-{@code is_new} visitor and jump
 * counts, deduplicates one result row per ({@code stt}, {@code is_new}) pair with
 * keyed state, and writes the result to ClickHouse table
 * {@code dws_time_sharing_statistics_flow}.
 */
public class DwsTimeSharingStatisticsFlow {
    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);

        // The unbounded group-by aggregations below (tyuut / jump) accumulate state
        // per is_new key; expire idle state so it does not grow forever.
        tEnv.getConfig().setIdleStateRetention(Duration.ofMinutes(15));

        // Source 1: page log. Only this source declares an event-time attribute,
        // because only the windowed aggregation needs a watermark.
        String topicName = "dwd_traffic_page_log";
        String groupID = "channel_flow_app";
        tEnv.executeSql("create table dwd_traffic_page_log( " +
                " `common` map<string,string>, " +
                " `page` map<string,string>, " +
                " `ts` bigint, " +
                " `rt` AS TO_TIMESTAMP_LTZ(ts,3), " +
                // NOTE(review): a 0-second watermark delay tolerates no out-of-order
                // events; late records are dropped from the window — confirm intended.
                " WATERMARK FOR rt AS rt - INTERVAL '0' SECOND " +
                ")" + SQLUtil.getKafkaSource(topicName,groupID));

        // Source 2: unique-visitor detail (one record per distinct visitor).
        String topicName2 = "dwd_traffic_unique_visitor_detail";
        String groupID2 = "channel_flow_app";
        tEnv.executeSql("create table dwd_traffic_unique_visitor_detail( " +
                " `common` map<string,string>, " +
                " `page` map<string,string>, " +
                " `ts` bigint " +
                ")" + SQLUtil.getKafkaSource(topicName2,groupID2));

        // Source 3: user-jump detail (one record per detected bounce/jump-out).
        String topicName3 = "dwd_traffic_user_jump_detail";
        String groupID3 = "channel_flow_app";
        tEnv.executeSql("create table dwd_traffic_user_jump_detail( " +
                " `common` map<string,string>, " +
                " `page` map<string,string>, " +
                " `ts` bigint " +
                ")" + SQLUtil.getKafkaSource(topicName3,groupID3));

        // Running count of unique visitors per is_new flag (unbounded aggregation,
        // emits retractions as the count grows).
        Table table1 = tEnv.sqlQuery(" " +
                " select " +
                " is_new," +
                " count(*) midCount " +
                " from( " +
                " select  " +
                " common['is_new'] is_new " +
                " from dwd_traffic_unique_visitor_detail) t " +
                " group by is_new ");
        tEnv.createTemporaryView("tyuut",table1);

        // Running count of jump-outs per is_new flag.
        Table table2 = tEnv.sqlQuery(" " +
                " select " +
                " is_new," +
                " count(*) jumpCount " +
                " from( " +
                " select  " +
                " common['is_new'] is_new " +
                " from dwd_traffic_user_jump_detail) t " +
                " group by is_new ");
        tEnv.createTemporaryView("jump",table2);

        // Main query: 5-second tumbling windows over the page log, grouped by
        // is_new, then left-joined with the running visitor/jump counts so that
        // windows with no matching side rows still emit (nulls coalesced to 0).
        // NOTE(review): jumpCount/sidCount, duringTime/midCount and
        // pageIdCount/midCount operate on BIGINT values, so the division truncates
        // before the +0.0 widens it — verify whether fractional ratios are wanted.
        Table table = tEnv.sqlQuery(
                " select " +
                        " stt," +
                        " edt, " +
                        " ii.is_new, " +
                        " if(t.midCount is not null,t.midCount,0) midCount, " +
                        " pageIdCount, " +
                        " if((jumpCount/sidCount+0.0) is not null,((jumpCount)/sidCount+0.0),cast(0 as double)) jump, " +
                        " if((duringTime/midCount+0.0) is not null,(duringTime/midCount+0.0),cast(0 as double)) avgDuringTime, " +
                        " if((pageIdCount/midCount+0.0) is not null,(pageIdCount/midCount+0.0),cast(0 as double)) avgPageId, " +
                        " ts " +
                        " from ( " +
                        "select" +
                        "  DATE_FORMAT(TUMBLE_START(rt, interval '5' second), 'yyyy-MM-dd HH:mm:ss') stt, " +
                        "  DATE_FORMAT(TUMBLE_END(rt, interval '5' second), 'yyyy-MM-dd HH:mm:ss')  edt, " +
                        " is_new, " +
                        " count(distinct sid) sidCount, " +
                        " count(page_id) pageIdCount, " +
                        " count(page_id)/count(distinct sid) avgPageId, " +
                        " sum(cast(during_time as bigint)) duringTime, " +
                        " sum(cast(during_time as bigint))/count(distinct sid) avgDuringTime,"+
                        " unix_timestamp()*1000 ts " +
                        " from ( " +
                        "select " +
                        " common['ar'] ar, " +
                        " common['ba'] ba, " +
                        " common['ch'] ch, " +
                        " common['is_new'] is_new, " +
                        " common['md'] md, " +
                        " common['mid'] mid, " +
                        " common['os'] os, " +
                        " common['sc'] sc, " +
                        " common['sid'] sid, " +
                        " common['uid'] uid, " +
                        " common['vc'] vc, " +
                        " page['during_time'] during_time, " +
                        " page['item'] item, " +
                        " page['item_type'] item_type, " +
                        " page['last_page_id'] last_page_id, " +
                        " page['page_id'] page_id, " +
                        " rt " +
                        " from " +
                        " dwd_traffic_page_log) as a  " +
                        " group by is_new, " +
                        " TUMBLE(rt, INTERVAL '5' SECOND) ) as ii " +
                        " left join tyuut t  on ii.is_new = t.is_new " +
                        " left join jump j  on  ii.is_new = j.is_new " +
                        "");

        // The left joins against updating tables produce a retract stream:
        // f0 == true for add records, f0 == false for retractions.
        DataStream<Tuple2<Boolean, TableBean>> tuple2DataStream = tEnv.toRetractStream(table, TableBean.class);
        SingleOutputStreamOperator<TableBean> tableBeanSingleOutputStreamOperator = tuple2DataStream.flatMap(new FlatMapFunction<Tuple2<Boolean, TableBean>, TableBean>() {
            @Override
            public void flatMap(Tuple2<Boolean, TableBean> value, Collector<TableBean> out) throws Exception {
                // BUG FIX: was `if (value.f0 = true)` — an assignment, which made
                // the condition always true and forwarded retraction records to the
                // sink. Only add (f0 == true) records may pass.
                if (value.f0) {
                    out.collect(value.f1);
                }
            }
        });
        // Deduplicate: for each window start (stt), emit at most one row per
        // is_new value ("1" and "0"), tracked in two keyed ValueStates.
        SingleOutputStreamOperator<TableBean> process = tableBeanSingleOutputStreamOperator.keyBy(TableBean::getStt)
                .process(new KeyedProcessFunction<String, TableBean, TableBean>() {
                    // First-seen markers per key; non-null means already emitted.
                    private ValueState<String> stt1;

                    private ValueState<String> stt2;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        stt1 = getRuntimeContext().getState(new ValueStateDescriptor<String>("stt1", String.class));
                        stt2 = getRuntimeContext().getState(new ValueStateDescriptor<String>("stt2", String.class));
                    }

                    @Override
                    public void processElement(TableBean value, Context ctx, Collector<TableBean> out) throws Exception {
                        // First new-visitor ("1") row for this window wins.
                        if (value.getIs_new().equals("1") && stt1.value() == null) {
                            stt1.update(value.getTs().toString());
                            out.collect(value);
                        }

                        // First returning-visitor ("0") row for this window wins.
                        if (value.getIs_new().equals("0") && stt2.value() == null) {
                            stt2.update(value.getTs().toString());
                            out.collect(value);
                        }
                    }
                });
        process.print();
        // ClickHouse sink: 9 placeholders matching TableBean's persisted fields.
        String sql = "insert into dws_time_sharing_statistics_flow values(?,?,?,?,?,?,?,?,?)";
        process.addSink(ClickHouseUtil.<TableBean>getJdbcSink(sql));

        try {
            env.execute();
        } catch (Exception e) {
            // Fail loudly: swallowing the exception here would let the JVM exit
            // with status 0 even though the job never ran. Cause is preserved.
            throw new RuntimeException("Flink job execution failed", e);
        }
    }
}
