package cn._51doit.day04;

import com.google.common.hash.BloomFilter;
import com.google.common.hash.Funnels;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichReduceFunction;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;

import java.nio.charset.Charset;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;


/**
 * @create: 2021-10-17 11:21
 * @author: 今晚打脑斧
 * @program: KeByDemo01
 * @Description:
 *  实时统计广告点击的人数和次数
 *  第二版(注意:当前实现仍用 HashSet 去重,BloomFilter/Funnels 导入了但尚未使用)
 * #广告id,用户id
 *    ad1,user1
 *    ad1,user1
 *    ad1,user2
 *    ad2,user1
 *    ad2,user2
 *
 *    #结果
 *    ad1,2,3
 *    ad2,2,2
 **/
public class ZuoYe3 {
    public static void main(String[] args) throws Exception {
        Configuration configuration = new Configuration();
        configuration.setInteger("rest.port", 8081);
        // Local environment with the web UI on port 8081 for easy inspection.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(configuration);
        DataStreamSource<String> lines = env.socketTextStream("doit01", 8888);

        // Parse "adId,userId" into (adId, "用户" + userId, 1); the trailing 1 is the click count seed.
        SingleOutputStreamOperator<Tuple3<String, String, Integer>> mapped = lines.map(new MapFunction<String, Tuple3<String, String, Integer>>() {
            @Override
            public Tuple3<String, String, Integer> map(String s) throws Exception {
                String[] split = s.split(",");
                return Tuple3.of(split[0], "用户" + split[1], 1);
            }
        });

        // Key by ad id so all clicks for the same ad are processed by the same subtask.
        KeyedStream<Tuple3<String, String, Integer>, String> keyedStream = mapped.keyBy(tp -> tp.f0);

        // Rolling aggregation: f2 accumulates total clicks, f1 is rewritten to the distinct-user count.
        SingleOutputStreamOperator<Tuple3<String, String, Integer>> reduce = keyedStream.reduce(new RichReduceFunction<Tuple3<String, String, Integer>>() {
            // ad id -> distinct user names seen so far. One map per operator instance; a subtask may
            // serve several keys, hence the outer map keyed by ad id.
            // NOTE(review): this is plain JVM state, not Flink managed state — it is lost on
            // failure/restart and ignored by checkpoints. Consider MapState if that matters.
            private HashMap<String, HashSet<String>> distinctUsers;

            @Override
            public void open(Configuration parameters) throws Exception {
                distinctUsers = new HashMap<>();
            }

            @Override
            public Tuple3<String, String, Integer> reduce(Tuple3<String, String, Integer> in1,
                                                          Tuple3<String, String, Integer> in2) throws Exception {
                // Total clicks so far for this ad.
                int clicks = in1.f2 + in2.f2;

                // BUG FIX: the original created a fresh HashSet and mm.put(...) it on EVERY call,
                // throwing away previously seen users; worse, after the first call in1.f1 holds the
                // count string (e.g. "2"), which was inserted into the set as if it were a user name.
                // Use computeIfAbsent so the set survives across calls, and add in1.f1 only on the
                // very first reduce for this key — the only time f1 is still a real user name.
                HashSet<String> users = distinctUsers.computeIfAbsent(in1.f0, k -> new HashSet<>());
                if (users.isEmpty()) {
                    users.add(in1.f1);
                }
                users.add(in2.f1);

                return Tuple3.of(in1.f0, String.valueOf(users.size()), clicks);
            }
        });

        // Print results. A key that has received only one event never passes through reduce(), so its
        // f1 is still the raw user name and does not parse as an int — report distinct users as 1.
        reduce.addSink(new SinkFunction<Tuple3<String, String, Integer>>() {
            @Override
            public void invoke(Tuple3<String, String, Integer> value) throws Exception {
                try {
                    int i = Integer.parseInt(value.f1);
                    System.out.println("广告 " + value.f0 + " 人次 " + value.f1 + " 次数 " + value.f2);
                } catch (Exception e) {
                    System.out.println("广告 " + value.f0 + " 人次 " + 1 + " 次数 " + value.f2);
                }
            }
        });
        env.execute();
    }

}
