package com.example.bigdata.spark

import org.apache.spark.{SparkConf, SparkContext}


object sparkDemo {
    /**
     * Entry point. Reads a text file and copies it to an output path as a
     * single-partition text file.
     *
     * @param args args(0) = input path (e.g. an HDFS URI), args(1) = output path
     */
    def main(args: Array[String]): Unit = {
        // Fail fast with a usage message instead of an opaque
        // ArrayIndexOutOfBoundsException when arguments are missing.
        require(args.length >= 2, "Usage: sparkDemo <inputPath> <outputPath>")
        val inputPath = args(0)
        val outputPath = args(1)
        println(s"inputPath : $inputPath\noutputPath : $outputPath")
        val conf = SparkContextInit()
        readAndWrite(conf, inputPath, outputPath)
    }

    /**
     * Builds the SparkConf with the basic job parameters
     * (master address and application name).
     */
    def SparkContextInit(): SparkConf = {
        // Last expression is the return value — no explicit `return` needed.
        new SparkConf()
                // Master to connect to; switch to "yarn" for cluster deployment.
                //.setMaster("yarn")
                .setMaster("local[1]")
                // Job name as shown in the Spark UI.
                .setAppName("读取hdfs文件-jingwei.shi")
    }

    /**
     * Reads the text file at `inputPath`, prints each line, and writes the
     * contents to `outputPath` coalesced into a single partition.
     *
     * @param conf       Spark configuration used to create the SparkContext
     * @param inputPath  source text file path
     * @param outputPath destination directory for the output text file
     */
    def readAndWrite(conf: SparkConf, inputPath: String, outputPath: String): Unit = {
        val sc = new SparkContext(conf)
        try {
            val lines = sc.textFile(inputPath)
            // NOTE(review): foreach(println) runs on the executors — the output
            // appears on the driver console only in local mode.
            lines.foreach(println)
            // repartition(1) forces a single output part file.
            lines.repartition(1).saveAsTextFile(outputPath)
        } finally {
            // Always release the SparkContext, even if reading/writing fails.
            sc.stop()
        }
    }

}
