package org.eking.bigdata.spark;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;

import org.apache.spark.api.java.JavaDoubleRDD;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;



import org.apache.spark.serializer.KryoRegistrator;
import org.apache.spark.sql.Row;
import org.apache.spark.SparkConf;

import scala.Tuple2;
import scala.collection.immutable.List;

/**
 * Smoke-test driver for a standalone Spark cluster: reads a text file from HDFS,
 * prints its line count, then runs a classic word count (split on single spaces,
 * map to (word, 1), reduce by key) and prints the collected result.
 *
 * <p>Expects a Spark master at spark://127.0.0.1:7702 and an HDFS namenode at
 * 127.0.0.1:9001. Run as the "bdtst" Hadoop user via system properties.
 */
public class TestSpark {

	public static void main(String[] args) {
		// Impersonate the "bdtst" user for both Hadoop and Spark before any
		// context is created — these properties are read at connection time.
		String user = "bdtst";
		System.setProperty("HADOOP_USER_NAME", user);
		System.setProperty("SPARK_USER_NAME", user);
		System.setProperty("HADOOP_GROUP_NAME", "bdtst");

		SparkConf conf = new SparkConf().setAppName("SparkTest");
		conf.setMaster("spark://127.0.0.1:7702");

		JavaSparkContext sc = new JavaSparkContext(conf);
		try {
			JavaRDD<String> distFile = sc.textFile("hdfs://127.0.0.1:9001/lele/spark2.txt");
			// Cached because the RDD is traversed twice: once for count(),
			// once for the word-count pipeline below.
			distFile.cache();

			long lineNum = distFile.count();
			System.out.println("line nums is:" + lineNum);

			// Tokenize each line on single spaces. NOTE(review): split(" ")
			// produces empty tokens for consecutive spaces — presumably the
			// input is single-space separated; confirm if that matters.
			JavaRDD<String> words = distFile.flatMap(
					new FlatMapFunction<String, String>() {
						@Override
						public Iterator<String> call(String line) {
							return Arrays.asList(line.split(" ")).iterator();
						}
					});

			// Pair each word with an initial count of 1.
			JavaPairRDD<String, Integer> pairs = words.mapToPair(
					new PairFunction<String, String, Integer>() {
						@Override
						public Tuple2<String, Integer> call(String word) {
							return new Tuple2<>(word, 1);
						}
					});

			// Sum the per-word counts. reduceByKey (not groupByKey) combines
			// map-side before shuffling, which is the cheaper aggregation.
			JavaPairRDD<String, Integer> wordCounts = pairs.reduceByKey(
					new Function2<Integer, Integer, Integer>() {
						@Override
						public Integer call(Integer i1, Integer i2) {
							return i1 + i2;
						}
					});

			// collect() pulls the full result to the driver — fine for a
			// small test file, unsafe for large inputs.
			System.out.println(wordCounts.collect());
			System.out.println("success");
		} finally {
			// Always release cluster resources, even if an action fails.
			sc.stop();
		}
	}

}
