package cn.itcast.flink.stream;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

/**
 * 使用Flink 计算引擎实现流式数据处理：从Socket接收数据，实时进行词频统计WordCount
 */
public class _02StreamWordCount {

	/**
	 * Entry point: builds and runs the streaming WordCount job.
	 *
	 * @param args optional; args[0] overrides the default input file path
	 * @throws Exception if the Flink job fails to execute
	 */
	public static void main(String[] args) throws Exception {
		// 1. Set up the streaming execution environment.
		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

		// 2. Source: read input text. An explicit path may be passed as args[0];
		// otherwise fall back to the default local file.
		// (Alternative source for true streaming input:)
		// DataStreamSource<String> inputDataStream = env.socketTextStream("node1.itcast.cn", 9999);
		String inputPath = (args.length > 0) ? args[0] : "D:\\0615\\bigdata-flink\\datas\\wordcount.data";
		DataStreamSource<String> inputDataStream = env.readTextFile(inputPath);

		// 3. Transformation: the streaming WordCount pipeline mirrors the batch version.
		SingleOutputStreamOperator<Tuple2<String, Integer>> resultDataStream = inputDataStream
				// Split each line into words on whitespace.
				.flatMap(new FlatMapFunction<String, String>() {
					@Override
					public void flatMap(String line, Collector<String> out) throws Exception {
						for (String word : line.trim().split("\\s+")) {
							// Guard against blank lines: "".split("\\s+") yields [""],
							// which would otherwise be counted as an empty "word".
							if (!word.isEmpty()) {
								out.collect(word);
							}
						}
					}
				})
				// Map each word to a (word, 1) tuple.
				.map(new MapFunction<String, Tuple2<String, Integer>>() {
					@Override
					public Tuple2<String, Integer> map(String word) throws Exception {
						return new Tuple2<>(word, 1);
					}
				})
				// Key by the word (tuple field 0) and sum the counts (tuple field 1).
				.keyBy(0).sum(1);

		// 4. Sink: print results to stdout.
		resultDataStream.print();

		// 5. Trigger job execution (lazy until execute() is called).
		env.execute(_02StreamWordCount.class.getSimpleName());
	}

}
