package com.shujia

import com.shujia.spark.SparkTool

object TestData extends SparkTool {

  /**
    * Spark business logic goes in `run`.
    *
    * Reads a Parquet dataset from a fixed local path, prints a sample of the
    * rows and the total row count. Intended for quick local inspection of the
    * `dal_tour_province_tourist_msk_d` output for one day partition.
    */
  override def run(args: Array[String]): Unit = {
    // Day partition to inspect (local dev path; assumes SparkTool exposes a
    // SparkSession as `sql` — defined outside this file).
    val parquetPath = "C:\\data\\dal_tour_province_tourist_msk_d\\day_id=20180503"

    val tourists = sql.read.parquet(parquetPath)
    tourists.show()
    println(tourists.count())
  }

  /**
    * Initialize the Spark configuration before the session is built.
    * Runs in local mode for development.
    */
  override def init(): Unit = conf.setMaster("local")
}
