/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.xj.spark;

import org.apache.spark.api.java.StorageLevels;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.storage.StorageLevel;
import scala.Tuple2;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;

import java.util.Arrays;
import java.util.List;

/**
 * Minimal Spark example that creates a local {@link JavaSparkContext} and
 * lazily references a text file passed as the first program argument.
 *
 * <p>NOTE(review): no RDD action is invoked on {@code lines}, so the file is
 * never actually read — Spark transformations are lazy. The original word-count
 * pipeline was commented out; it has been removed here as dead code.
 */
public class JavaWordCount {
    /**
     * Entry point.
     *
     * @param args {@code args[0]} — path (local or HDFS URI) of the text file to load
     * @throws Exception propagated from Spark context setup/teardown
     */
    public static void main(String[] args) throws Exception {
        // Guard before touching args[0]; without this the program dies with an
        // ArrayIndexOutOfBoundsException instead of a usable message.
        if (args.length < 1) {
            System.err.println("Usage: JavaWordCount <file>");
            System.exit(1);
        }

        // Master is hard-coded to a single local thread; SPARK_HOME and
        // SPARK_EXAMPLES_JAR are read from the environment and may be null
        // (acceptable for local mode).
        JavaSparkContext ctx = new JavaSparkContext("local[1]", "JavaWordCount",
                System.getenv("SPARK_HOME"), System.getenv("SPARK_EXAMPLES_JAR"));
        try {
            // Lazy: builds the RDD lineage only. Nothing is read from disk
            // until an action (count, collect, ...) is called on it.
            JavaRDD<String> lines = ctx.textFile(args[0], 1);
        } finally {
            // Always release the Spark context; the original leaked it.
            ctx.stop();
        }
    }
}
