package org.zjt.flink;

import org.apache.flink.api.common.accumulators.Accumulator;
import org.apache.flink.api.common.accumulators.IntCounter;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.configuration.Configuration;

import java.io.Serializable;
import java.util.Collection;

/**
 * Demonstrates Flink broadcast variables and accumulators with the DataSet API.
 *
 * <p>A small {@code DataSet} is broadcast to every parallel instance of a map
 * operator, which reads it in {@code open()}. An {@link IntCounter} accumulator
 * counts processed elements; the globally merged result is read from the job
 * execution result after the sink ({@code print()}) triggers execution.
 *
 * @author juntao.zhang
 * Date: 2018-10-09 17:45
 */
public class BroadcastValue {

    public static void main(String[] args) throws Exception {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        // 1. The DataSet to be broadcast to all parallel operator instances.
        DataSet<Integer> toBroadcast = env.fromElements(1, 2, 3);

        DataSet<String> data = env.fromElements("a", "b");

        data.map(new RichMapFunction<String, String>() {

            // Hold the accumulator as a field so map() does not have to look it
            // up through the runtime context (untyped) on every record.
            private final IntCounter counter = new IntCounter();

            @Override
            public void open(Configuration parameters) throws Exception {
                // 3. Access the broadcast DataSet as a Collection.
                Collection<Integer> broadcastSet =
                        getRuntimeContext().getBroadcastVariable("broadcastSetName");
                broadcastSet.forEach(System.out::println);

                // Register the accumulator once per parallel task instance; the
                // runtime merges all local values into the job result.
                getRuntimeContext().addAccumulator("accumulator", counter);
            }

            @Override
            public String map(String value) throws Exception {
                // Count every processed element. getLocalValue() is only this
                // task's partial count, not the global total.
                counter.add(1);
                System.out.println(counter.getLocalValue());

                return value.toUpperCase();
            }
        }).withBroadcastSet(toBroadcast, "broadcastSetName") // 2. Broadcast the DataSet under a name.
                .print(); // print() is a sink and eagerly executes the job.

        // Read the globally merged accumulator value from the finished job.
        Object accumulator = env.getLastJobExecutionResult().getAccumulatorResult("accumulator");
        System.out.println(accumulator);
    }
}
