package cn.tedu.flinkbasic.dataset;

import org.apache.flink.api.common.functions.*;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.operators.DataSource;
import org.apache.flink.api.java.operators.MapOperator;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.util.Collector;

/**
 * 针对Transformation的练习
 */
/**
 * DataSet-API Transformation practice: left-outer-joins teacher records
 * ("name|age|addr", read from {@code teacher.txt}) with in-memory gender
 * records ("name|gender") on the name field, then writes the joined
 * 4-tuples to {@code result2.txt}.
 */
public class TransformationDemo {
    public static void main(String[] args) throws Exception {
        // 1. Obtain the batch execution environment.
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        // 2. Sources: "name|age|addr" lines from a file, and "name|gender"
        //    elements supplied in-memory.
        DataSource<String> source1 = env.readTextFile("teacher.txt");
        DataSource<String> source2 = env.fromElements("liupx|female", "liuyj|male", "zhangsz|male");

        // Parse "name|age|addr" into a 3-tuple ("|" must be escaped in the regex).
        MapOperator<String, Tuple3<String, String, String>> map1 =
                source1.map(new MapFunction<String, Tuple3<String, String, String>>() {
                    @Override
                    public Tuple3<String, String, String> map(String value) throws Exception {
                        String[] s = value.split("\\|");
                        return new Tuple3<>(s[0], s[1], s[2]);
                    }
                });

        // Parse "name|gender" into a 2-tuple.
        MapOperator<String, Tuple2<String, String>> map2 =
                source2.map(new MapFunction<String, Tuple2<String, String>>() {
                    @Override
                    public Tuple2<String, String> map(String value) throws Exception {
                        String[] s = value.split("\\|");
                        return new Tuple2<>(s[0], s[1]);
                    }
                });

        // 3. Transformation: left outer join on the name field (index 0 on
        //    both sides). For a left row with no matching right row, Flink
        //    passes null as `second`, so substitute the literal "null".
        map1.leftOuterJoin(map2)
                .where(0)
                .equalTo(0)
                .with(new JoinFunction<Tuple3<String, String, String>,
                        Tuple2<String, String>,
                        Tuple4<String, String, String, String>>() {
                    @Override
                    public Tuple4<String, String, String, String> join(
                            Tuple3<String, String, String> first,
                            Tuple2<String, String> second) throws Exception {
                        return new Tuple4<>(first.f0, first.f1, first.f2,
                                second == null ? "null" : second.f1);
                    }
                })
                // 4. Sink: write the result as text with a single writer task
                //    so the output lands in one file.
                .writeAsText("result2.txt").setParallelism(1);

        // 5. Submit the job. In the DataSet API a file sink only runs when
        //    execute() is called explicitly (nothing fails without it — the
        //    data just never lands). Conversely, print() triggers execution
        //    itself, and a following execute() would fail with
        //    "no new data sinks".
        env.execute("TransformationDemo");
    }
}
