package cn.sheep.dmp.etl

import cn.sheep.dmp.beans.Log
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

/**
  * Sheep.Old @ 64341393
  * Created 2018/3/28
  */
object Sql2Parquet {

    /**
      * Reads a parquet dataset, registers it as temp table "logs", and
      * prints the first 100 rows of `select * from logs`.
      *
      * NOTE(review): despite the object name, this job *reads* parquet
      * rather than writing it — confirm intent against the rest of the ETL.
      *
      * @param args optional; args(0) overrides the input parquet path.
      *             Defaults to the previously hard-coded "F:\\dmp\\parquet2"
      *             for backward compatibility.
      */
    def main(args: Array[String]): Unit = {

        // Input path may now be supplied on the command line.
        val inputPath = if (args.nonEmpty) args(0) else "F:\\dmp\\parquet2"

        val sparkConf = new SparkConf().setAppName("日志转parquet文件")
          .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer") // Kryo for faster RDD serialization

        // Only force local mode when no master was configured externally
        // (e.g. via spark-submit --master), so the job can also run on a cluster.
        if (!sparkConf.contains("spark.master")) {
            sparkConf.setMaster("local[*]")
        }

        val sc = new SparkContext(sparkConf)
        val sqlc = new SQLContext(sc)

        try {
            val parquet = sqlc.read.parquet(inputPath)

            // registerTempTable is the SQLContext-era API; on Spark 2.x+
            // prefer createOrReplaceTempView.
            parquet.registerTempTable("logs")
            sqlc.sql("select * from logs").show(100)
        } finally {
            // Release the SparkContext even if the read or query fails.
            sc.stop()
        }
    }

}
