package com.cyy.log.spark_extract.batch

import com.cyy.log.spark_extract.common.{DateUtils, LogInfo}
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.types._

/**
  * Batch ETL job: parses raw access logs into a DataFrame and reports
  * per-IP traffic totals via Spark SQL.
  *
  * @author Cyy
  * @since 2019-05-07
  */
object SparkBatchFormat {

  /**
    * Entry point: reads raw access-log lines from the path given as the first
    * CLI argument, parses each line into a structured row via
    * `LogInfo.parseLog_reflection`, and prints the total traffic per IP.
    *
    * Usage: SparkBatchFormat <input-path>
    */
  def main(args: Array[String]): Unit = {
    // Fail fast with a usage message instead of an opaque
    // ArrayIndexOutOfBoundsException when no path is supplied.
    if (args.isEmpty) {
      System.err.println("Usage: SparkBatchFormat <input-path>")
      sys.exit(1)
    }
    val path = args(0)

    // NOTE(review): master is hard-coded for local development; on a cluster
    // it should come from spark-submit rather than the code.
    val spark = SparkSession.builder()
      .master("local[2]")
      .appName("SparkLogExtract")
      .getOrCreate()

    try {
      val access = spark.sparkContext.textFile(path)

      // Reflection-based schema inference from the LogInfo case-class parser.
      import spark.implicits._
      val logDF = access.map(LogInfo.parseLog_reflection).toDF()

      // Programmatic alternative with an explicit StructType:
      // val logDF = spark.createDataFrame(access.map(LogInfo.parseLog_program), LogInfo.struct)

      logDF.createOrReplaceTempView("logInfo")
      // Alias the aggregate so the displayed column has a readable name.
      spark.sql("select ip, sum(traffic) as total_traffic from logInfo group by ip").show()
      logDF.printSchema()
    } finally {
      // Always release the SparkSession, even if the job fails mid-way.
      spark.stop()
    }
  }
}




