package com.shujia.test

import com.shujia.util.SparkTool
import org.apache.spark.sql.SQLContext

object TestParquet extends SparkTool {

  /** Fallback dataset location, kept for backward compatibility with the
    * original hard-coded path. Override by passing a path as `args(0)`.
    */
  private val DefaultInputPath = "D:\\data\\dwi_staypoint_msk_d\\day_id=20180503"

  /**
    * Spark business logic: reads a Parquet dataset and prints a sample
    * of its rows to stdout as a quick sanity check.
    *
    * @param args optional job arguments; `args(0)`, when present, is the
    *             Parquet input path (defaults to [[DefaultInputPath]])
    */
  override def run(args: Array[String]): Unit = {
    // Allow the input path to be supplied on the command line so the job
    // is not tied to one developer's local disk layout.
    val inputPath = args.headOption.getOrElse(DefaultInputPath)

    // NOTE(review): SQLContext is deprecated since Spark 2.0 in favor of
    // SparkSession; migration depends on what SparkTool exposes — confirm.
    // `sc` is presumably the SparkContext provided by SparkTool.
    val sqlContext = new SQLContext(sc)
    val df = sqlContext.read.parquet(inputPath)

    // show() prints the first 20 rows (truncated) to stdout.
    df.show()
  }

  /**
    * Initialization hook: subclasses set Spark runtime parameters on
    * `conf` (provided by SparkTool) before the context is created.
    */
  override def init(): Unit = {
    // Run locally; change or remove this when submitting to a cluster.
    conf.setMaster("local")
  }
}
