package com.hhf.rrd.transformation;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.operators.DataSource;
import org.apache.flink.api.java.operators.FlatMapOperator;
import org.apache.flink.util.Collector;

import java.util.ArrayList;
import java.util.List;

/**
 * Deduplication example: splits comma-separated lines into words and removes
 * duplicates with the DataSet API's {@code distinct()} transformation.
 * (Original header said "排序"/sorting, which did not match the code.)
 *
 * @author huanghaifeng15
 * @date 2022/2/11 12:50
 **/
public class DistinctApp {

    /**
     * Builds a small in-memory dataset of comma-separated word lists,
     * flat-maps each line into individual words, deduplicates them with
     * {@code distinct()}, and prints the result.
     *
     * @param args command-line arguments (unused)
     * @throws Exception if the Flink job fails to execute
     */
    public static void main(String[] args) throws Exception {
        List<String> lines = new ArrayList<>();
        lines.add("hadoop,spark");
        lines.add("hadoop,flink");
        lines.add("flink,flink");

        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        DataSource<String> source = env.fromCollection(lines);

        // Emit one record per comma-separated token. An anonymous class is
        // used (rather than a lambda) so Flink can infer the output type
        // despite generic type erasure.
        FlatMapOperator<String, String> words =
                source.flatMap(new FlatMapFunction<String, String>() {
                    @Override
                    public void flatMap(String value, Collector<String> out) throws Exception {
                        for (String token : value.split(",")) {
                            out.collect(token);
                        }
                    }
                });

        // print() is a sink that triggers job execution in the DataSet API,
        // so no explicit env.execute() call is required here.
        words.distinct().print();
    }
}
