package com.ruoyi.flink.chapten5;

/**
 * Copyright (c) 2020-2030 尚硅谷 All Rights Reserved
 * <p>
 * Project:  FlinkTutorial
 * <p>
 * Created by  wushengran
 */

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * Demonstrates the Flink {@code reduce} transformation on a keyed stream:
 * first builds a running click count per user, then funnels all per-user
 * counts through a single key to track the currently most active user.
 */
public class Trans5ReduceTest {
	@SuppressWarnings("serial")
	public static void main(String[] args) throws Exception {
		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
		// Parallelism 1 keeps the printed output in a single, ordered task.
		env.setParallelism(1);

		DataStreamSource<Event> stream = env.fromElements(new Event("Mary", "./home", 1000L),
				new Event("Bob", "./cart", 2000L), new Event("Alice", "./prod?id=10", 3000L),
				new Event("Bob", "./prod?id=1", 3300L), new Event("Bob", "./home", 3500L),
				new Event("Alice", "./prod?id=200", 3200L), new Event("Bob", "./prod?id=2", 3800L),
				new Event("Bob", "./prod?id=3", 4200L));

		// Count the number of visits per user: map each event to (user, 1),
		// key by user name, and sum the counts with a rolling reduce.
		SingleOutputStreamOperator<Tuple2<String, Long>> clicksByUser = stream
				.map(new MapFunction<Event, Tuple2<String, Long>>() {
					@Override
					public Tuple2<String, Long> map(Event e) throws Exception {
						return Tuple2.of(e.user, 1L);
					}
				}).keyBy(data -> data.f0) // partition the stream by user name
				.reduce(new ReduceFunction<Tuple2<String, Long>>() {
					@Override
					public Tuple2<String, Long> reduce(Tuple2<String, Long> value1, Tuple2<String, Long> value2)
							throws Exception {
						// For every incoming record, add 1 to this user's pv count.
						return Tuple2.of(value1.f0, value1.f1 + value2.f1);
					}
				});

		// Select the currently most active user: route every per-user count to
		// the same constant key so one reduce state sees all of them.
		SingleOutputStreamOperator<Tuple2<String, Long>> result = clicksByUser.keyBy(data -> "key")
				.reduce(new ReduceFunction<Tuple2<String, Long>>() {

					@Override
					public Tuple2<String, Long> reduce(Tuple2<String, Long> value1, Tuple2<String, Long> value2)
							throws Exception {
						// Keep whichever user currently has the higher count.
						return value1.f1 > value2.f1 ? value1 : value2;
					}

				});

		result.print();

		env.execute();

	}
}
