// NOTE(review): This entire file is commented-out dead code — a Spring @Configuration
// class wiring a Flink word-count pipeline. Best practice is to delete commented-out
// code and rely on version control history; it is kept here byte-identical in case the
// commenting-out was intentional (e.g. the Flink dependency was removed from the build).
//
// NOTE(review): If this class is ever re-enabled, the following should be addressed:
//   - keyBy(0) and sum(1) below use tuple-index selectors, which are deprecated in
//     Flink 1.11+ and removed in Flink 2.x; prefer keyBy(t -> t.f0) with a lambda
//     KeySelector. — TODO confirm the target Flink version before uncommenting.
//   - The pipeline is only *defined*; env.execute() is never called anywhere visible
//     in this file, so nothing would actually run. — verify where execution happens.
//   - Exposing a DataStream as a Spring @Bean is unusual: beans are singletons, so the
//     pipeline graph would be built exactly once at context startup — confirm intended.
//   - StreamExecutionEnvironment.getExecutionEnvironment() as a singleton bean ties the
//     whole application context to one Flink environment instance.
//
//package cn.donghuapian.config;
//
//import org.apache.flink.api.common.functions.FlatMapFunction;
//import org.apache.flink.api.common.functions.MapFunction;
//import org.apache.flink.api.java.tuple.Tuple2;
//import org.apache.flink.streaming.api.datastream.DataStream;
//import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
//import org.apache.flink.util.Collector;
//import org.springframework.context.annotation.Bean;
//import org.springframework.context.annotation.Configuration;
//
//@Configuration
//public class FlinkConfig {
//
//    @Bean
//    public StreamExecutionEnvironment streamExecutionEnvironment() {
//        return StreamExecutionEnvironment.getExecutionEnvironment();
//    }
//
//    @Bean
//    public DataStream<String> wordCountDataStream(StreamExecutionEnvironment env) {
//        DataStream<String> text = env.fromElements("Hello World", "Flink is awesome", "Spring Boot rocks");
//
//        DataStream<String> wordCount = text.flatMap(new Tokenizer())
//                .keyBy(0)
//                .sum(1)
//                .map(new MapFunction<Tuple2<String, Integer>, String>() {
//                    @Override
//                    public String map(Tuple2<String, Integer> value) throws Exception {
//                        return value.f0 + ": " + value.f1;
//                    }
//                });
//
//        return wordCount;
//    }
//
//    public static final class Tokenizer implements FlatMapFunction<String, Tuple2<String, Integer>> {
//        @Override
//        public void flatMap(String value, Collector<Tuple2<String, Integer>> out) {
//            String[] words = value.toLowerCase().split("\\W+");
//            for (String word : words) {
//                if (word.length() > 0) {
//                    out.collect(new Tuple2<>(word, 1));
//                }
//            }
//        }
//    }
//}
