package com.kangaroo.sparkcore;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.PairFunction;
import scala.Tuple2;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.regex.Pattern;

/**
 * Spark core (RDD) word-count demo: reads a text file, splits each line on
 * spaces, counts word occurrences, prints the result, then sums a small
 * parallelized list as a reduce example.
 *
 * @author liubo
 * @Date 2020/11/1
 * @since JDK 1.8
 */
public class SparkCoreDemo {

    /** Spark application name shown in the UI. */
    private static final String APP_NAME = "sparkTest";

    /** Spark master URL; "local" runs the whole job inside this JVM. */
    private static final String MASTER = "local";

    /** Pre-compiled single-space splitter (compile Pattern once, reuse). */
    private static final Pattern SPACE = Pattern.compile(" ");

    /**
     * Entry point: runs a classic word count over a local text file using the
     * RDD API (flatMap -> mapToPair -> reduceByKey), prints each (word, count)
     * pair, then demonstrates a simple {@code reduce} over a parallelized list.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName(APP_NAME).setMaster(MASTER);

        // JavaSparkContext is Closeable; try-with-resources fixes the leak in
        // the original, which never stopped the context.
        try (JavaSparkContext context = new JavaSparkContext(conf)) {
            // NOTE(review): hard-coded Windows path — consider reading it from args.
            JavaRDD<String> lines = context.textFile("E:\\spark\\int.txt");

            // line -> individual words
            JavaRDD<String> words =
                    lines.flatMap(line -> Arrays.asList(SPACE.split(line)).iterator());

            // word -> (word, 1)
            JavaPairRDD<String, Integer> pairs =
                    words.mapToPair(word -> new Tuple2<>(word, 1));

            // (word, 1)* -> (word, total)
            JavaPairRDD<String, Integer> counts = pairs.reduceByKey(Integer::sum);

            counts.collect().forEach(System.out::println);

            // reduce example: 1 + 2 + 3 + 4 = 10
            Integer sum = context.parallelize(Arrays.asList(1, 2, 3, 4)).reduce(Integer::sum);
            System.out.println(sum);
        }
    }
}
