package com.demo.spark.controller;

import com.alibaba.fastjson.JSON;
import com.demo.spark.model.TestModel;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang.StringUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.deploy.SparkSubmit;
import org.apache.spark.launcher.SparkAppHandle;
import org.apache.spark.launcher.SparkLauncher;
import org.apache.spark.sql.AnalysisException;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.SQLContext;
import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import scala.Serializable;
import scala.Tuple2;

import java.io.IOException;
import java.util.*;
import java.util.concurrent.CountDownLatch;
import java.util.regex.Pattern;

/**
 * @author liuyuanqi
 * @date 2019-07-05 10:48
 */

@Slf4j
@RestController
@RequestMapping("/test")
public class TestController implements Serializable {

    private static final long serialVersionUID = 164182919655618922L;

    // Compiled once; Pattern is immutable and thread-safe.
    // ("transient" removed: it has no effect on a static field.)
    private static final Pattern SPACE = Pattern.compile(" ");

    /**
     * Submits a pre-built word-count jar to the standalone Spark master by
     * invoking {@code SparkSubmit}'s CLI entry point in-process.
     *
     * @return the literal string {@code "c"} once the submit call returns
     */
    @PostMapping(value = "/c")
    public Object c() {
        // NOTE(review): SparkSubmit.main() is a CLI entry point and may call
        // System.exit() on failure, which would terminate this whole web
        // application — confirm this is acceptable before using in production.
        String[] args = new String[] { "--master",
                "spark://192.168.1.168:7077",
                "--name", "test java submit job to spark",
                "--class", "com.demo.spark.util.WordCount",
                "/soft/jar/spark-1.0.jar"};
        SparkSubmit.main(args);
        return "c";
    }

    /**
     * Launches a Spark application in cluster deploy mode via
     * {@link SparkLauncher} and returns immediately without waiting for it.
     *
     * <p>The returned {@link SparkAppHandle} is not awaited or monitored; to
     * track completion, register a {@code SparkAppHandle.Listener} (or poll
     * {@code handle.getState()}) and block on e.g. a {@code CountDownLatch}
     * until {@code getState().isFinal()} is true.
     *
     * @return the literal string {@code "666"} immediately after launch
     * @throws IOException if the launcher fails to start the child process
     */
    @PostMapping(value = "/b")
    public Object b() throws IOException {
        SparkLauncher launcher = new SparkLauncher();
        SparkAppHandle handle = launcher.setAppName("Test Submit Spark Job From Java")
                .setSparkHome("/soft/spark-2.4.3-bin-hadoop2.7")
                .setMaster("spark://192.168.1.168:7077")
                .setAppResource("/soft/jar/spark-1.0.jar")
                .setMainClass("com.demo.spark.RemoteMainTest")
                .setDeployMode("cluster")
                .setConf("spark.master","spark://192.168.1.168:7077")
                .setConf("spark.app.id", "11222")
                .setConf("spark.driver.memory", "2g")
                .setConf("spark.akka.frameSize", "200")
                .setConf("spark.executor.memory", "1g")
                .setConf("spark.executor.instances", "32")
                .setConf("spark.executor.cores", "3")
                .setConf("spark.default.parallelism", "10")
                .setConf("spark.driver.allowMultipleContexts", "true")
                .setConf("spark.cores.max", "4")
                .setVerbose(true)
                .startApplication();
        // Fire-and-forget: log the initial state so the launch is at least traceable.
        log.info("Spark application launched, initial state: {}", handle.getState());
        return "666";
    }

    /**
     * Copies table {@code TEST_TABLE1} from an Oracle database into a MySQL
     * table {@code t_clone} using Spark SQL's JDBC reader/writer, running a
     * local in-process Spark context.
     *
     * @return the literal string {@code "888"} when the copy completes
     * @throws AnalysisException if the Spark SQL query cannot be analyzed
     */
    @PostMapping(value = "/a")
    public Object a() throws AnalysisException {
        // A SparkContext is the entry point for all Spark operations; creating
        // it acquires cluster resources and builds the runtime environment.
        // The only required argument is a SparkConf (a set of K-V properties).
        SparkConf sparkConf = new SparkConf().setAppName("JavaWordCount")
                .setMaster("local").set("spark.driver.allowMultipleContexts","true");
        JavaSparkContext ctx = new JavaSparkContext(sparkConf);
        try {
            SQLContext sqlContext = SQLContext.getOrCreate(ctx.sc());

            // TODO(security): credentials are hardcoded — move them to external
            // configuration (e.g. application properties / a secrets store).
            Properties sourcePro = new Properties();
            sourcePro.setProperty("driver", "oracle.jdbc.driver.OracleDriver");
            sourcePro.setProperty("user", "UBDI");
            sourcePro.setProperty("password", "123456");
            sourcePro.setProperty("dbtable", "TEST_TABLE1");

            Properties desPro = new Properties();
            desPro.setProperty("driver", "com.mysql.jdbc.Driver");
            desPro.setProperty("user", "root");
            desPro.setProperty("password", "123");
            desPro.setProperty("dbtable", "t_clone");

            // Primitive long: no reason to box the timestamps.
            long startTime = System.currentTimeMillis();

            Dataset da = sqlContext.read().jdbc("jdbc:oracle:thin:@192.168.1.221:1521:UQIAN",
                    "TEST_TABLE1", sourcePro);
            da.createOrReplaceTempView("TEST_TABLE1");
            Dataset df1 = sqlContext.sql("select * from TEST_TABLE1");
            df1.show();
            df1.write().mode("overwrite").jdbc("jdbc:mysql://192.168.1.167:3306/test?useUnicode=true&characterEncoding=UTF-8",
                    "t_clone", desPro);

            long endTime = System.currentTimeMillis();
            log.info("table copy finished in {} s", (endTime - startTime) / 1000);
        } finally {
            // Always release the Spark context, even if the JDBC read/write
            // throws — the original code leaked it on any exception.
            ctx.stop();
        }
        return "888";
    }

}
