package com.sankuai.fsp.di.platform

import org.apache.hadoop.hive.ql.exec.spark.session.SparkSession
import org.apache.log4j.{Level, Logger}
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.hive.HiveContext

import scala.collection.immutable.List
import scala.collection.mutable.ArrayBuffer

/**
 * Small demo of basic RDD transformations (`flatMap`, `map`, `flatMapValues`)
 * run against a local Spark master, printing each result to stdout.
 */
object SparkArchetype {
  def main(args: Array[String]): Unit = {

    // Log level: only print ERROR-level logs so demo output stays readable.
    Logger.getLogger("org.apache.spark").setLevel(Level.ERROR)
    Logger.getLogger("org.eclipse.jetty.server").setLevel(Level.ERROR)

    // None of these are reassigned, so they are vals, not vars.
    val sparkConfig = new SparkConf().setMaster("local").setAppName("sparkRddTest")
    val sc = new SparkContext(sparkConfig)

    try {
      val rdd = sc.makeRDD(Array((1, 3), (2, 2), (3, 2), (4, 2)))

      // flatMap over a String: the String is implicitly a Seq[Char], so this
      // yields an RDD[Char] — each character of "k￥v" prints on its own line.
      rdd.flatMap(x => x._1 + "￥" + x._2).collect.foreach(println)

      println("******************")

      // Sum of each pair's elements.
      rdd.map(x => x._1 + x._2).collect.foreach(println)

      println("******************")

      // flatMapValues: value -> String (Seq[Char]), so each (key, char) pair prints.
      rdd.flatMapValues(x => x + "").collect.foreach(println)
    } finally {
      // Always release the SparkContext, even if a job above throws.
      sc.stop()
    }
  }
}
