package com.sui.bigdata.flink.sql.side.program;

import com.google.common.hash.BloomFilter;
import com.google.common.hash.Funnels;
import com.sui.bigdata.flink.sql.side.program.util.DistinctProcessFunction;
import com.sui.bigdata.flink.sql.side.program.util.MyProcessWindowFunction;
import com.sui.bigdata.flink.sql.side.program.util.WordCountData;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RuntimeContext;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.typeinfo.TypeHint;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.common.typeutils.base.IntValueSerializer;
import org.apache.flink.api.java.tuple.Tuple;
import org.apache.flink.api.java.tuple.Tuple1;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.co.CoProcessFunction;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.functions.windowing.delta.DeltaFunction;
import org.apache.flink.streaming.api.windowing.assigners.GlobalWindows;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.evictors.TimeEvictor;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.triggers.ContinuousProcessingTimeTrigger;
import org.apache.flink.streaming.api.windowing.triggers.DeltaTrigger;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.table.api.Tumble;
import org.apache.flink.util.Collector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;

/**
 * @author YongChen
 * @date 2020/5/13 16:08
 * @description Demonstrates approaches to de-duplicated (distinct) counting on a
 *              socket text stream with Flink windows, triggers and evictors.
 * @email yong_chen@sui.com
 */
/**
 * Flink streaming job that demonstrates distinct-count strategies over a
 * socket text source. Input lines are expected in the form {@code <id>,<click>}.
 *
 * <p>Two pipelines are built from the same source:
 * <ol>
 *   <li>a per-id day-long window fired every second, with a zero-length
 *       {@code TimeEvictor} so each firing only processes new elements;</li>
 *   <li>a per-bucket pre-aggregation in a global window (fired every 5s),
 *       rolled up by a 5s tumbling all-window. Only this result is printed.</li>
 * </ol>
 */
public class Distinct {

    /** Default socket source location, used when no CLI arguments are given. */
    private static final String DEFAULT_HOST = "10.201.7.115";
    private static final int DEFAULT_PORT = 8888;

    /**
     * Entry point.
     *
     * @param args optional: {@code args[0]} = source host, {@code args[1]} = source port.
     *             When absent, the historical defaults above are used, so existing
     *             invocations behave exactly as before.
     * @throws Exception if the Flink job fails to execute
     */
    public static void main(String[] args) throws Exception {

        // Allow host/port override from the command line; fall back to defaults.
        final String host = args.length > 0 ? args[0] : DEFAULT_HOST;
        final int port = args.length > 1 ? Integer.parseInt(args[1]) : DEFAULT_PORT;

        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime);

        // Parse each "<id>,<click>" line once (the original split the line twice
        // per record) into (id, hashBucket, click).
        DataStream<Tuple3<String, Integer, Integer>> hashStream =
                env.socketTextStream(host, port)
                        .map((data) -> {
                            String[] parts = data.split(",");
                            String id = parts[0];
                            Integer click = Integer.valueOf(parts[1]);
                            // NOTE(review): hashCode() can be negative, so the bucket
                            // in f1 may be in (-200, 200). Fine as a grouping key, but
                            // use Math.floorMod if non-negative buckets are required.
                            return new Tuple3<>(id, id.hashCode() % 200, click);
                        }).returns(Types.TUPLE(Types.STRING, Types.INT, Types.INT));

        // Approach 1: per-id, day-long processing-time window fired every second.
        // The zero-length evictor (doEvictAfter=true) discards elements once they
        // have been processed, so every firing only sees elements that arrived
        // since the last one. Kept for illustration; its output is not printed.
        DataStream<Tuple3<String, Long, Integer>> dedupStream1 = hashStream
                .keyBy(0)
                .window(TumblingProcessingTimeWindows.of(Time.days(1)))
                .trigger(ContinuousProcessingTimeTrigger.of(Time.seconds(1)))
                .evictor(TimeEvictor.of(Time.seconds(0), true))
                .process(new MyProcessWindowFunction());

        // Approach 2: pre-aggregate clicks per hash bucket in a global window
        // fired every 5 seconds, then roll the per-bucket partial sums up in a
        // 5-second tumbling all-window and project out the total.
        DataStream<Tuple1<Integer>> dedupStream2 = hashStream
                .keyBy(1)
                .window(GlobalWindows.create())
                .trigger(ContinuousProcessingTimeTrigger.of(Time.seconds(5)))
                .sum(2)
                .windowAll(TumblingProcessingTimeWindows.of(Time.seconds(5)))
                .sum(2)
                .map(new MapFunction<Tuple3<String, Integer, Integer>, Tuple1<Integer>>() {
                    @Override
                    public Tuple1<Integer> map(Tuple3<String, Integer, Integer> value) throws Exception {
                        return Tuple1.of(value.f2);
                    }
                });

        dedupStream2.print();
        // Descriptive job name (previously an expletive, which surfaces in the
        // Flink dashboard and logs).
        env.execute("distinct-count");
    }
}
