package com.jaymin.flink.datastream.transformation;

import com.google.common.base.Splitter;
import com.google.common.collect.Lists;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

import java.math.BigDecimal;
import java.util.List;

/**
 * @author jaymin
 * @since 2024/4/5 19:04
 */
public class RichTransformationApp {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment executionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment();
        // Demo toggle: uncomment exactly one transformation example before running.
        map(executionEnvironment);
//        filter(executionEnvironment);
//        flatMap(executionEnvironment);
//        keyBy(executionEnvironment);
        // Job name now matches this class; "SourceApp" was a copy-paste leftover
        // from another demo and mislabeled the job in the Flink UI.
        executionEnvironment.execute("RichTransformationApp");
    }

    /**
     * Keyed aggregation demo: sums the traffic column per domain and prints
     * the running totals.
     *
     * <p>Anonymous classes (rather than lambdas) are kept on purpose: a lambda
     * returning {@code Tuple2} would lose its generic type info to erasure.
     */
    private static void keyBy(StreamExecutionEnvironment executionEnvironment) {
        DataStreamSource<String> source = executionEnvironment.readTextFile("data/access.log");
        source
                .map(new MapFunction<String, Tuple2<String, BigDecimal>>() {
                    @Override
                    public Tuple2<String, BigDecimal> map(String line) throws Exception {
                        // Assumed layout: field[1] = domain, field[2] = traffic
                        // — TODO confirm against data/access.log.
                        String[] fields = line.split(",");
                        return Tuple2.of(fields[1].trim(), new BigDecimal(fields[2].trim()));
                    }
                })
                .keyBy(record -> record.f0)
                .reduce(new ReduceFunction<Tuple2<String, BigDecimal>>() {
                    @Override
                    public Tuple2<String, BigDecimal> reduce(Tuple2<String, BigDecimal> left, Tuple2<String, BigDecimal> right) throws Exception {
                        BigDecimal total = left.f1.add(right.f1);
                        return Tuple2.of(left.f0, total);
                    }
                })
                .print();
    }

    /**
     * FlatMap demo: splits each comma-separated input line into individual
     * tokens (trimmed, empties dropped) and emits one record per token.
     */
    private static void flatMap(StreamExecutionEnvironment executionEnvironment) {
        executionEnvironment.fromData(
                        "1,2,3,4,5",
                        "2,3,4,5,6",
                        "11,22,33,44,55"
                )
                .flatMap(new FlatMapFunction<String, String>() {
                    @Override
                    public void flatMap(String s, Collector<String> collector) throws Exception {
                        // Iterate the lazy Splitter result directly; the
                        // previous intermediate List copy was unnecessary.
                        for (String token : Splitter.on(',').trimResults().omitEmptyStrings().split(s)) {
                            collector.collect(token);
                        }
                    }
                })
                .print();
    }

    /**
     * Filter demo: keeps only log lines whose traffic column (index 2)
     * exceeds 4000, then prints them.
     */
    private static void filter(StreamExecutionEnvironment executionEnvironment) {
        DataStreamSource<String> source = executionEnvironment.readTextFile("data/access.log");
        source
                .filter(line -> {
                    String[] fields = line.split(",");
                    BigDecimal threshold = new BigDecimal("4000");
                    // Strictly greater than the threshold passes the filter.
                    return new BigDecimal(fields[2].trim()).compareTo(threshold) > 0;
                })
                .print();
    }

    /**
     * Map demo: applies the project-defined {@code PkMapFunction} to each
     * line of the access log and prints the result.
     */
    private static void map(StreamExecutionEnvironment executionEnvironment) {
        DataStreamSource<String> source = executionEnvironment.readTextFile("data/access.log");
        // NOTE(review): parallelism is set after the source is created —
        // preserved as-is; reordering could change operator parallelism.
        executionEnvironment.setParallelism(3);
        source.map(new PkMapFunction()).print();
    }
}
