package never


import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, SparkContext}

object AdvertisingParquet {

  /**
   * Entry point: converts a raw advertising log file into a
   * snappy-compressed Parquet dataset via `MYUtil.convert`.
   *
   * @param args optional overrides — args(0) = input log path,
   *             args(1) = output Parquet directory. When absent, the
   *             original hard-coded local paths are used.
   */
  def main(args: Array[String]): Unit = {

    val conf: SparkConf = new SparkConf().setAppName("AdvertisingParquet").setMaster("local[*]")

    // Use the Kryo serialization library.
    // BUGFIX: the class is KryoSerializer, not "KryoSerialization" — the
    // previous value would fail with ClassNotFoundException at startup.
    conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")

    // Compress Parquet output files with snappy.
    conf.set("spark.sql.parquet.compression.codec", "snappy")

    // Register our custom classes with the Kryo serializer.
    conf.registerKryoClasses(Array(AdvertisingParquet.getClass))

    val sc: SparkContext = new SparkContext(conf)

    val sql: SQLContext = new SQLContext(sc)

    // Input log file path (args(0) overrides the default).
    val input =
      if (args.length > 0) args(0)
      else "D:\\java-items\\广告平台\\广告平台-1\\2016-10-01_06_p1_invalid.1475274123982.log.FINISH"

    // Output Parquet directory (args(1) overrides the default).
    val output =
      if (args.length > 1) args(1)
      else "D:\\java-items\\广告平台\\广告平台-1\\parquet"

    try {
      MYUtil.convert(sql, input, output)
    } finally {
      // Always release the SparkContext, even if the conversion fails.
      sc.stop()
    }
  }

}
