package com.atguigu.flink.chapter5.transform;

import com.atguigu.flink.WaterSensor;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * @Author lizhenchao@atguigu.cn
 * @Date 2020/12/19 14:56
 */
public class Flink11_Transform_Reduce {
    /**
     * Demonstrates the keyed {@code reduce} transformation.
     *
     * <p>Reads lines of the form {@code "id,ts,vc"} from a socket, maps them to
     * {@link WaterSensor} records, keys the stream by sensor id, and reduces each
     * key group by summing the water level ({@code vc}).
     *
     * <p>Reduce semantics worth remembering:
     * <ul>
     *   <li>The result type must be the same as the element type of the stream.</li>
     *   <li>The reduce function is NOT invoked for the first element of a key;
     *       that element is emitted unchanged.</li>
     * </ul>
     *
     * <p>(The older {@code fold} transformation — which allowed an initial value
     * and a different output type — was removed in Flink 1.12.)
     *
     * @param args unused
     * @throws Exception if job submission or execution fails
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        KeyedStream<WaterSensor, String> streamKeyed = env
          .socketTextStream("hadoop162", 9999)
          .map(line -> {
              // Expected input format: "<id>,<timestamp>,<waterLevel>"
              String[] split = line.split(",");
              return new WaterSensor(split[0], Long.valueOf(split[1]), Integer.valueOf(split[2]));
          })
          .keyBy(WaterSensor::getId);

        // Sum the water level (vc) per sensor id, carrying the latest timestamp.
        // Comparable to Spark's reduceByKey.
        streamKeyed
          .reduce((s1, s2) -> new WaterSensor(s1.getId(), s2.getTs(), s1.getVc() + s2.getVc()))
          .print();

        env.execute();
    }
}
/*
reduce:
    1. The type of the aggregated result must be the same as the type of the
       elements in the stream before aggregation.
    2. The reduce function is not invoked for the first element of a key;
       that element passes through unchanged.
fold:
    Removed in Flink 1.12.0.
    Supported an initial (seed) value.
    The element type of the resulting stream could differ from that of the
    input stream.
 */
