package spark;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.TextOutputFormat;
import org.apache.hadoop.mapred.lib.NullOutputFormat;
import org.apache.spark.SparkConf;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import scala.Tuple2;

import java.util.Arrays;

/**
 * Spark Streaming word count.
 *
 * <p>Watches an HDFS location for newly arriving text files, splits each line on
 * single spaces, counts word occurrences within every 10-second batch, and writes
 * each batch's counts back to HDFS as text files ({@code key TAB value} per line).
 *
 * <p>Original author: LDL, 2015/6/24.
 */
public class WordCount {

    /**
     * Entry point: builds the streaming pipeline, starts it, and blocks until the
     * context terminates.
     *
     * @param args unused
     * @throws InterruptedException if the thread is interrupted while awaiting
     *                              stream termination
     */
    public static void main(String[] args) throws InterruptedException {
        // local[2]: at least two cores so the receiver and the processing tasks
        // do not starve each other.
        SparkConf sparkConf = new SparkConf()
                .setMaster("local[2]")
                .setAppName("JavaNetworkWordCount");
        JavaStreamingContext jsc = new JavaStreamingContext(sparkConf, Durations.seconds(10));
        try {
            // NOTE(review): textFileStream monitors a directory for files that
            // appear AFTER the stream starts; pointing it at a single file
            // ("10.txt") likely picks up nothing — confirm the intended path is
            // a directory.
            JavaDStream<String> lines = jsc.textFileStream("hdfs:///data/dfs/10.txt");

            // Split each line into words. Single-space delimiter: consecutive
            // spaces produce empty tokens, matching the original behavior.
            JavaDStream<String> words = lines.flatMap(s -> Arrays.asList(s.split(" ")));

            // Classic map -> reduce word count, scoped to each batch.
            JavaPairDStream<String, Integer> wordCounts = words
                    .mapToPair(s -> new Tuple2<>(s, 1))
                    .reduceByKey(Integer::sum);

            // Bug fix: the original passed NullOutputFormat, which silently
            // discards every record (nothing was ever written to the result
            // path), and declared LongWritable for Integer values. Use
            // TextOutputFormat with matching key/value classes so the counts
            // actually land in HDFS.
            wordCounts.saveAsHadoopFiles(
                    "hdfs://hadoop01:8020/data/dfs/result", "txt",
                    Text.class, IntWritable.class, TextOutputFormat.class);

            jsc.start();
            jsc.awaitTermination();
        } finally {
            // Tear the context down even if awaitTermination is interrupted;
            // the original called stop() only on the normal path.
            jsc.stop();
        }
    }
}
