package com.atguigu.gmall.app.dws.log;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.gmall.bean.ChannelFlowBean;
import com.atguigu.gmall.bean.TableBean;
import com.atguigu.gmall.bean.TableTwoBean;
import com.atguigu.gmall.utils.ClickHouseUtil;
import com.atguigu.gmall.utils.KafkaUtil;
import com.atguigu.gmall.utils.SQLUtil;
import lombok.val;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.windowing.AllWindowFunction;
import org.apache.flink.streaming.api.functions.windowing.ProcessAllWindowFunction;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.api.windowing.windows.Window;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.util.Collector;

import java.lang.reflect.Array;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Arrays;

/**
 * DWS-layer channel flow aggregation job.
 *
 * <p>Reads three DWD Kafka topics (page log, unique-visitor detail, user-jump
 * detail), computes per-channel session metrics over 10-second tumbling event-time
 * windows (session count, avg pages per session, avg duration, jump rate), joins
 * in per-channel UV and jump counts, and writes the result to the ClickHouse
 * table {@code dws_channel_flowApp}.
 */
public class DwsChannelFlowApp {
    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
        // Expire idle join/aggregation state after 15 minutes to bound state size.
        tEnv.getConfig().setIdleStateRetention(Duration.ofMinutes(15));

        // Page log source: the only table with an event-time attribute (rt) and
        // watermark, since it drives the tumbling windows below.
        String topicName = "dwd_traffic_page_log";
        String groupID = "channel_flow_app";
        tEnv.executeSql("create table dwd_traffic_page_log( " +
                " `common` map<string,string>, " +
                " `page` map<string,string>, " +
                " `ts` bigint, " +
                " `rt` AS TO_TIMESTAMP_LTZ(ts,3), " +
                " WATERMARK FOR rt AS rt - INTERVAL '0' SECOND " +
                ")" + SQLUtil.getKafkaSource(topicName, groupID));

        // Unique-visitor detail source (no event-time needed; used for a
        // non-windowed per-channel count joined in later).
        String topicName2 = "dwd_traffic_unique_visitor_detail";
        String groupID2 = "channel_flow_app";
        tEnv.executeSql("create table dwd_traffic_unique_visitor_detail( " +
                " `common` map<string,string>, " +
                " `page` map<string,string>, " +
                " `ts` bigint " +
                ")" + SQLUtil.getKafkaSource(topicName2, groupID2));

        // User-jump (bounce) detail source.
        String topicName3 = "dwd_traffic_user_jump_detail";
        String groupID3 = "channel_flow_app";
        tEnv.executeSql("create table dwd_traffic_user_jump_detail( " +
                " `common` map<string,string>, " +
                " `page` map<string,string>, " +
                " `ts` bigint " +
                ")" + SQLUtil.getKafkaSource(topicName3, groupID3));

        // Per-channel unique-visitor count (continuously updated).
        Table table1 = tEnv.sqlQuery(" " +
                " select " +
                " ch," +
                " count(*) midCount " +
                " from( " +
                " select  " +
                " common['ch'] ch " +
                " from dwd_traffic_unique_visitor_detail) t " +
                " group by ch ");
        tEnv.createTemporaryView("tyuut", table1);

        // Per-channel jump (bounce) count (continuously updated).
        Table table2 = tEnv.sqlQuery(" " +
                " select " +
                " ch," +
                " count(*) jumpCount " +
                " from( " +
                " select  " +
                " common['ch'] ch " +
                " from dwd_traffic_user_jump_detail) t " +
                " group by ch ");
        tEnv.createTemporaryView("jump", table2);

        // Windowed per-channel session metrics from the page log, left-joined
        // with the UV and jump counts. Left joins keep window rows for channels
        // that have no UV/jump data yet; the if(... is not null ...) guards
        // replace the resulting NULLs with 0.
        Table table = tEnv.sqlQuery(
                " select " +
                        " stt," +
                        " edt, " +
                " ii.ch, " +
                " if(t.midCount is not null,t.midCount,0) midCount, " +
                " sidCount , " +
                " avgPageId, " +
                " avgDuringTime, " +
                " if(((jumpCount+0.0)/sidCount) is not null,(jumpCount+0.0)/sidCount,cast(0 as double)) jump, " +
                " ts " +
                " from ( " +
                "select" +
                        "  DATE_FORMAT(TUMBLE_START(rt, interval '10' second), 'yyyy-MM-dd HH:mm:ss') stt, " +
                        "  DATE_FORMAT(TUMBLE_END(rt, interval '10' second), 'yyyy-MM-dd HH:mm:ss')  edt, " +
                " ch, " +
                " count(distinct sid) sidCount, " +
                " count(page_id)/count(distinct sid) avgPageId, " +
                " sum(cast(during_time as bigint))/count(distinct sid) avgDuringTime," +
                " unix_timestamp()*1000 ts " +
                " from ( " +
                "select " +
                " common['ar'] ar, " +
                " common['ba'] ba, " +
                " common['ch'] ch, " +
                " common['is_new'] is_new, " +
                " common['md'] md, " +
                " common['mid'] mid, " +
                " common['os'] os, " +
                " common['sc'] sc, " +
                " common['sid'] sid, " +
                " common['uid'] uid, " +
                " common['vc'] vc, " +
                " page['during_time'] during_time, " +
                " page['item'] item, " +
                " page['item_type'] item_type, " +
                " page['last_page_id'] last_page_id, " +
                " page['page_id'] page_id, " +
                " rt " +
                " from " +
                " dwd_traffic_page_log) as a  " +
                " group by ch, " +
                        " TUMBLE(rt, INTERVAL '10' SECOND) ) as ii " +
                " left join tyuut t on ii.ch = t.ch " +
                " left join jump j on  ii.ch = j.ch " +
                        "");

        // The left joins make this an updating result, so a retract stream is
        // required. f0 == true marks an insert/add row; f0 == false marks a
        // retraction that must NOT be written to ClickHouse.
        DataStream<Tuple2<Boolean, TableTwoBean>> tuple2DataStream = tEnv.toRetractStream(table, TableTwoBean.class);
        SingleOutputStreamOperator<TableTwoBean> tableBeanSingleOutputStreamOperator = tuple2DataStream.flatMap(new FlatMapFunction<Tuple2<Boolean, TableTwoBean>, TableTwoBean>() {
            @Override
            public void flatMap(Tuple2<Boolean, TableTwoBean> value, Collector<TableTwoBean> out) throws Exception {
                // BUG FIX: the original used `value.f0 = true` (assignment, always
                // truthy), which also forwarded retraction rows to the sink.
                if (value.f0) {
                    out.collect(value.f1);
                }
            }
        });
        tableBeanSingleOutputStreamOperator.print();
        String sql = "insert into dws_channel_flowApp values(?,?,?,?,?,?,?,?,?)";
        tableBeanSingleOutputStreamOperator.addSink(ClickHouseUtil.<TableTwoBean>getJdbcSink(sql));

        try {
            env.execute();
        } catch (Exception e) {
            // Fail loudly instead of swallowing the job failure: a swallowed
            // exception here let the process exit as if the job had succeeded.
            throw new RuntimeException("DwsChannelFlowApp job execution failed", e);
        }
    }
}
