package priv.tuyou.myself;

import org.apache.commons.lang.StringUtils;
import org.apache.flink.api.common.accumulators.IntCounter;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.util.Collector;

public class BatchTest {

	/**
	 * Batch word count over a local text file.
	 *
	 * <p>Reads each line, splits it into words on non-word characters, emits
	 * {@code (word, 1)} tuples, sums the counts per word, and prints the
	 * result. A custom accumulator named {@code "tuyou"} counts how many word
	 * tuples flowed through the map stage; its merged value is printed after
	 * the job finishes.
	 *
	 * @param args unused
	 * @throws Exception if the Flink job fails to execute
	 */
	public static void main(String[] args) throws Exception {

		final ExecutionEnvironment env =
				ExecutionEnvironment.getExecutionEnvironment();
		DataSet<String> source = env.readTextFile("D:/edgeServer/count.txt");
		source.flatMap((String value, Collector<Tuple2<String, Integer>> out) -> {
			// Split on runs of non-word characters; leading separators can
			// produce empty fragments, so blank tokens are skipped.
			String[] words = value.split("\\W+");
			for (String word : words) {
				if (StringUtils.isNotBlank(word)) {
					out.collect(new Tuple2<>(word, 1));
				}
			}
		})
		// Java lambdas lose their generic signature to type erasure, so Flink
		// cannot infer the flatMap output type on its own. Without this
		// explicit hint the job fails at submission with an
		// InvalidTypesException ("return type of function could not be
		// determined automatically").
		.returns(Types.TUPLE(Types.STRING, Types.INT))
		.map(new RichMapFunction<Tuple2<String, Integer>, Tuple2<String, Integer>>() {
			private static final long serialVersionUID = 4927940180153251092L;
			// Counts every tuple seen by this operator; per-subtask counters
			// are merged by the JobManager into one final value.
			private final IntCounter numLines = new IntCounter();

			@Override
			public void open(Configuration parameters) throws Exception {
				super.open(parameters);
				getRuntimeContext().addAccumulator("tuyou", this.numLines);
			}

			@Override
			public Tuple2<String, Integer> map(Tuple2<String, Integer> value) throws Exception {
				numLines.add(1);
				return value;
			}
		}).groupBy(0).sum(1).print();
		// print() triggers eager job execution, so the execution result — and
		// the merged accumulator — is already available at this point.
		Object tupleCount = env.getLastJobExecutionResult().getAccumulatorResult("tuyou");
		System.out.println(tupleCount);
	}
}
