import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.operators.MapOperator;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.metrics.Counter;

/**
 * Demonstrates registering and updating a Flink {@code Counter} metric
 * from within a {@code RichMapFunction}.
 *
 * @author wangzj
 * @date 2020/8/12 21:22
 */
public class FlinkMetrics extends RichMapFunction<String, Long> {

    // Transient because metrics must not be serialized with the function;
    // the counter is (re-)registered per task instance in open().
    private transient Counter counter;

    /**
     * Registers the {@code "myCounter"} metric with this task's metric group.
     * Called once per parallel instance before any {@link #map(String)} call.
     */
    @Override
    public void open(Configuration configuration) {
        this.counter = getRuntimeContext()
                .getMetricGroup()
                .counter("myCounter");
    }

    /**
     * Increments the counter for every incoming record and returns the
     * running count as seen by this parallel instance.
     *
     * @param value one input text line
     * @return the counter value after incrementing for this record
     */
    @Override
    public Long map(String value) throws Exception {
        this.counter.inc();
        // Debug output kept on purpose: this class is a metrics demo.
        System.out.println("++++++++++++++" + this.counter.getCount());
        System.out.println("==================" + value);
        return this.counter.getCount();
    }

    /**
     * Driver: builds a small in-memory DataSet, maps it through
     * {@link FlinkMetrics}, and prints the per-record counter values.
     */
    public static void main(String[] args) throws Exception {
        // Create the Flink batch execution environment.
        final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        // Input DataSet: each element is one line of text.
        DataSet<String> text = env.fromElements(
                "flink spark hadoop",
                "flink flink flink",
                "spark spark hadoop",
                "flink hadoop hadoop"
        );

        MapOperator<String, Long> counts = text.map(new FlinkMetrics());

        // printToErr() is a sink: it triggers job execution and writes
        // the results to stderr.
        counts.printToErr();
    }
}
