package com.cike.sparkstudy.sql.java;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;

/**
 * Spark job that normalizes place-name lines: any line that does not already
 * contain the character "省" (province) is rewritten as
 * {@code first-2-chars + "省" + rest}; lines that already contain "省" pass
 * through unchanged.
 */
public class RevisePhrase {

    /**
     * Applies the revision rule to a single line.
     *
     * @param line input line; may be {@code null}
     * @return the line unchanged when it is {@code null}, already contains
     *         "省", or is too short to split (fewer than 2 chars); otherwise
     *         the first two characters, then "省", then the remainder
     */
    static String revise(String line) {
        // Pass through: null, already revised, or too short for substring(0, 2).
        if (line == null || line.contains("省") || line.length() < 2) {
            return line;
        }
        String province = line.substring(0, 2);
        String city = line.substring(2);
        // BUGFIX: the original assigned the result to a local and returned null,
        // discarding the transformation. Return the rewritten line instead.
        return province + "省" + city;
    }

    public static void main(String[] args) {
        String master = "local";
        String appName = "RevisePhrase";
        String inputPath = "file:///RevisePhrase.txt";

        SparkConf conf = new SparkConf().setMaster(master).setAppName(appName);
        JavaSparkContext javaSparkContext = new JavaSparkContext(conf);

        // One line of input text per RDD element.
        JavaRDD<String> file = javaSparkContext.textFile(inputPath);

        // Rewrite each line via the rule in revise().
        JavaRDD<String> words = file.map(new Function<String, String>() {
            @Override
            public String call(String line) throws Exception {
                return revise(line);
            }
        });

        // Collect the (small) result to the driver and print each line,
        // matching the intent of the commented-out Scala original
        // (words.collect().foreach(println)).
        for (String word : words.collect()) {
            System.out.println(word);
        }

        javaSparkContext.stop();
    }
}
