import com.doit.doitdata.process.Preprocess
import org.apache.spark.sql.SparkSession

/**
  * Created by hunter.coder 涛哥
  * 2019/3/28 18:22
  * Contact QQ: 657270652
  * Version: 1.0
  * More learning material: https://blog.csdn.net/coderblack/
  * Description: sanity-checks that the cleaned (pre-processed) data looks correct
  **/
object ReadParquetLog {

  /** Sanity-check entry point: reads the pre-processed parquet logs and prints
    * their schema plus a small sample of rows.
    *
    * @param args optional first argument: path to the parquet output directory.
    *             Defaults to the original hard-coded local path for backward
    *             compatibility when no argument is given.
    */
  def main(args: Array[String]): Unit = {
    // Generalized: allow the input path to be passed on the command line;
    // the previous hard-coded constant remains the default.
    val inputPath = args.headOption.getOrElse("G:\\logs\\2019-04-17-out")

    val spark = SparkSession
      .builder()
      .enableHiveSupport()
      // Fix: name the app after THIS driver. The original used
      // Preprocess.getClass.getSimpleName, which names the app after the
      // wrong class and, for a Scala object, yields a trailing '$'
      // (e.g. "Preprocess$").
      .appName("ReadParquetLog")
      .master("local")
      .getOrCreate()

    // try/finally so the session is released even if the read/show fails.
    try {
      val logs = spark.read.parquet(inputPath)
      logs.printSchema()
      // truncate = false: print long column values in full.
      logs.show(10, truncate = false)
    } finally {
      spark.close()
    }
  }
}
