package com.jaymin.flink.baisc;

import java.util.Locale;

import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.operators.DataSource;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.Collector;

/**
 * Batch word-count application using the Flink DataSet API.
 *
 * <p>Pipeline: read text lines → split on commas into words → drop blank
 * tokens → map each word to {@code (word, 1)} → group by word → sum counts
 * → print results to stdout.
 *
 * @author jaymin
 * @since 2024/3/30 23:44
 */
public class BatchWCAPP {

    /** Input file used when no path is supplied on the command line. */
    private static final String DEFAULT_INPUT_PATH = "data/wc.data";

    public static void main(String[] args) throws Exception {
        ExecutionEnvironment environment = ExecutionEnvironment.getExecutionEnvironment();

        // Generalized: accept the input path as the first program argument,
        // falling back to the original hard-coded file for compatibility.
        String inputPath = args.length > 0 ? args[0] : DEFAULT_INPUT_PATH;
        DataSource<String> dataSource = environment.readTextFile(inputPath);

        dataSource.flatMap(new WCFlatMapFunction())
                // Remove empty tokens produced by consecutive/trailing commas.
                .filter((FilterFunction<String>) StringUtils::isNotBlank)
                .map(new WCMapFunction())
                .groupBy(0) // key on tuple field 0: the word
                .sum(1)     // sum tuple field 1: the count
                .print();

    }
}

/**
 * Splits each comma-separated input line into individual tokens, emitting
 * each token lower-cased and trimmed of surrounding whitespace.
 *
 * <p>Consecutive or trailing commas yield empty tokens, which are emitted
 * as empty strings; the downstream blank filter is expected to drop them.
 */
class WCFlatMapFunction implements FlatMapFunction<String, String> {

    @Override
    public void flatMap(String value, Collector<String> collector) throws Exception {
        String[] words = value.split(",");
        for (String word : words) {
            // Locale.ROOT makes lower-casing deterministic regardless of the
            // JVM default locale (e.g. the Turkish dotless-i would otherwise
            // break keys like "TITLE" on tr_TR systems).
            collector.collect(word.toLowerCase(Locale.ROOT).trim());
        }
    }
}

/**
 * Maps a single word to an initial {@code (word, 1)} count pair for the
 * downstream group-by/sum aggregation.
 */
class WCMapFunction implements MapFunction<String, Tuple2<String, Integer>> {

    @Override
    public Tuple2<String, Integer> map(String word) throws Exception {
        // Tuple2.of is Flink's static factory; equivalent to new Tuple2<>(...).
        return Tuple2.of(word, 1);
    }
}