package com.zfei.firstflink;

import org.apache.flink.api.common.functions.FlatJoinFunction;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.FoldFunction;

import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.time.Time;

import org.apache.flink.util.Collector;
import scala.Int;

import java.io.File;
import java.util.HashMap;

/**
 * Streaming word count over a socket text source.
 *
 * <p>Reads lines from {@code localhost:9999}, splits each non-blank line into
 * whitespace-separated words, keys the stream by word, and maintains a running
 * per-word count via an incremental reduce. Each element travels through the
 * pipeline as a single-entry {@code HashMap<word, count>}. Results are printed
 * to stdout with parallelism 2.
 *
 * <p>Start a text source first, e.g. {@code nc -lk 9999}, then run this job.
 *
 * @author jiwenfei
 */
public class WikipediaAnalysis {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment see = StreamExecutionEnvironment.getExecutionEnvironment();
        DataStream<String> dd = see.socketTextStream("localhost", 9999);

        dd.filter(item -> !item.trim().equals(""))   // drop blank lines up front
                .flatMap(new FlatMapFunction<String, HashMap<String, Integer>>() {
                    // Emit one single-entry map (word -> 1) per word in the line.
                    @Override
                    public void flatMap(String sentence, Collector<HashMap<String, Integer>> out) throws Exception {
                        for (String word : sentence.split(" ")) {
                            HashMap<String, Integer> one = new HashMap<>();
                            one.put(word, 1);
                            out.collect(one);
                        }
                    }
                })
                .keyBy(new KeySelector<HashMap<String, Integer>, String>() {
                    // Key by the single word each map carries.
                    @Override
                    public String getKey(HashMap<String, Integer> event) {
                        return event.keySet().iterator().next();
                    }
                })
                .reduce(new ReduceFunction<HashMap<String, Integer>>() {
                    // Combine two partial counts for the same word.
                    // FIX: the original wrote acc.get(word) + 1, ignoring the
                    // incoming element's count. A ReduceFunction must be
                    // associative — sum BOTH sides so merged partial aggregates
                    // remain correct.
                    @Override
                    public HashMap<String, Integer> reduce(HashMap<String, Integer> acc, HashMap<String, Integer> next) throws Exception {
                        String word = acc.keySet().iterator().next();
                        HashMap<String, Integer> merged = new HashMap<>();
                        merged.put(word, acc.get(word) + next.get(word));
                        return merged;
                    }
                })
                .print().setParallelism(2);

        see.execute("dddfd");
    }
}
