package streamAPI.project.easy;

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.shaded.guava18.com.google.common.hash.BloomFilter;
import org.apache.flink.shaded.guava18.com.google.common.hash.Funnels;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.testng.annotations.Test;
import util.FlinkUtils;

import java.util.HashSet;

/**
 * @author yue.cao
 * @since 10-28-2020
 */
/**
 * Streaming demos that count distinct participants per activity ("去重" = de-duplication).
 *
 * <p>Input records are {@code "activity,uid"} pairs, e.g. {@code A,1} means user 1 joined
 * activity A. Each job keys the stream by activity and emits (activity, distinct-user-count).
 */
public class DemoWithQuChong {

	/**
	 * Exact de-duplication: keeps the full set of user ids per activity key in keyed
	 * {@link ValueState}. Always correct, but state grows linearly with distinct users.
	 */
	public static void main(String[] args) throws Exception {
		// Input: A,1 A,2 B,1 B,2 — "user 1 joined activity A", and so on.
		// Goal: how many distinct users joined each activity.
		DataStream<String> source = FlinkUtils.createCommonKafkaStream();

		source.map(new MapFunction<String, Tuple2<String, String>>() {
			@Override
			public Tuple2<String, String> map(String value) throws Exception {
				// "activity,uid" -> (activity, uid)
				String[] split = value.split(",");
				return Tuple2.of(split[0], split[1]);
			}
		}).keyBy(0).map(new RichMapFunction<Tuple2<String, String>, Tuple2<String, Integer>>() {

			/**
			 * UIDs already seen for the current activity key. Declared transient so the
			 * state handle is not Java-serialized along with the function instance
			 * (it is re-initialized in {@link #open}).
			 */
			private transient ValueState<HashSet> keyedState;

			@Override
			public void open(Configuration parameters) throws Exception {
				super.open(parameters);

				ValueStateDescriptor<HashSet> descriptor = new ValueStateDescriptor<>(
						"word-quchong",
						HashSet.class
				);
				keyedState = getRuntimeContext().getState(descriptor);
			}

			@Override
			@SuppressWarnings("unchecked")
			public Tuple2<String, Integer> map(Tuple2<String, String> activityInfo) throws Exception {
				HashSet<String> uids = keyedState.value();
				if (uids == null) {
					// First record for this activity key.
					uids = new HashSet<>();
				}
				uids.add(activityInfo.f1);
				keyedState.update(uids);
				// Set size == distinct users seen so far for this activity.
				return Tuple2.of(activityInfo.f0, uids.size());
			}
		}).print();
		FlinkUtils.addTroubleStreamForTestError();
		FlinkUtils.execute();
	}

	/**
	 * Approximate de-duplication: a Guava {@link BloomFilter} plus an integer counter in
	 * keyed state. Uses bounded memory, but may undercount on false positives, and the
	 * filter is checkpointed through generic serialization — keep as an example only
	 * (as the original test description notes).
	 */
	@Test(description = "BloomFilter 方式 这个方式其实是有问题的 当做例子看就好")
	public void t1() throws Exception {
		DataStream<String> source = FlinkUtils.createCommonKafkaStream();

		source.map(new MapFunction<String, Tuple2<String, String>>() {
			@Override
			public Tuple2<String, String> map(String value) throws Exception {
				// "activity,uid" -> (activity, uid)
				String[] split = value.split(",");
				return Tuple2.of(split[0], split[1]);
			}
		}).keyBy(0).map(new RichMapFunction<Tuple2<String, String>, Tuple2<String, Integer>>() {

			/** Membership sketch of UIDs seen for the current activity key. */
			private transient ValueState<BloomFilter> bloomFilterValueState;
			/** Number of UIDs the filter reported as previously unseen. */
			private transient ValueState<Integer> countValueState;

			@Override
			public void open(Configuration parameters) throws Exception {
				super.open(parameters);

				ValueStateDescriptor<BloomFilter> bloomDescriptor = new ValueStateDescriptor<>(
						"bloomfilter-state",
						BloomFilter.class
				);
				ValueStateDescriptor<Integer> countDescriptor = new ValueStateDescriptor<>(
						"bloomfilter-count-state",
						Integer.class
				);
				bloomFilterValueState = getRuntimeContext().getState(bloomDescriptor);
				countValueState = getRuntimeContext().getState(countDescriptor);
			}

			@Override
			@SuppressWarnings("unchecked")
			public Tuple2<String, Integer> map(Tuple2<String, String> activityInfo) throws Exception {
				String activity = activityInfo.f0;
				String uid = activityInfo.f1;

				BloomFilter<String> bloomFilter = bloomFilterValueState.value();
				if (bloomFilter == null) {
					// ~10k expected insertions; false-positive rate is Guava's default (3%).
					bloomFilter = BloomFilter.create(Funnels.unencodedCharsFunnel(), 10000);
				}
				// Null-guard the counter independently of the filter: previously the
				// counter was only initialized when the filter was null, which risked an
				// NPE at `value + 1` if the two states ever got out of step.
				Integer count = countValueState.value();
				if (count == null) {
					count = 0;
				}
				if (!bloomFilter.mightContain(uid)) {
					// Definitely unseen: remember it and bump the counter.
					bloomFilter.put(uid);
					count = count + 1;
					countValueState.update(count);
				}
				bloomFilterValueState.update(bloomFilter);
				return Tuple2.of(activity, count);
			}
		}).print();
		FlinkUtils.addTroubleStreamForTestError();
		FlinkUtils.execute();
	}
}

