package com.xxxx.test1;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.operators.AggregateOperator;
import org.apache.flink.api.java.operators.FlatMapOperator;
import org.apache.flink.api.java.operators.MapOperator;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.Collector;

import java.util.Arrays;

/**
 * First Flink lesson: a batch word count using the DataSet API.
 * @className WordCountDataSetJava
 * @description Reads a text file, splits lines into words, and prints per-word counts.
 * @author zhush
 * @date 2025/5/31 16:34
 **/

public class WordCountDataSetJava {
    /**
     * Batch word count: reads {@code data/dataset.txt}, splits each line on single
     * spaces, and prints each distinct word with its occurrence count.
     *
     * @param args unused command-line arguments
     * @throws Exception if reading the input or executing the Flink job fails
     */
    public static void main(String[] args) throws Exception {
        // Create the batch execution environment.
        ExecutionEnvironment environment = ExecutionEnvironment.getExecutionEnvironment();
        // Read the input file; each element of the DataSet is one line of text.
        DataSet<String> source = environment.readTextFile("data/dataset.txt");
        // Split every line on spaces and emit each word as its own element.
        FlatMapOperator<String, String> words = source.flatMap(
                new FlatMapFunction<String, String>() {
                    @Override
                    public void flatMap(String line, Collector<String> collector)
                            throws Exception {
                        for (String word : line.split(" ")) {
                            collector.collect(word);
                        }
                    }
                });
        // Pair each word with an initial count of 1: (word, 1).
        MapOperator<String, Tuple2<String, Integer>> pairs = words.map(
                new MapFunction<String, Tuple2<String, Integer>>() {
                    @Override
                    public Tuple2<String, Integer> map(String word) throws Exception {
                        return Tuple2.of(word, 1);
                    }
                });
        // Group by the word (tuple field 0) and sum the counts (tuple field 1).
        AggregateOperator<Tuple2<String, Integer>> sum = pairs.groupBy(0).sum(1);
        // In the DataSet API, print() both triggers job execution and writes the
        // result to stdout — no separate environment.execute() call is needed.
        sum.print();
    }
}
