import org.apache.spark.SparkConf
import org.apache.spark.SparkContext._
import org.apache.spark.sql.types.{StructType, StructField, StringType, IntegerType};
import org.apache.spark.sql.SparkSession;
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.{DataFrame, Row, SaveMode, _}
import org.apache.spark.sql.SQLContext

object SparkStatCleanJob 
{
  /**
   * Entry point: reads the raw access log from "access.log", converts each
   * line to a Row via AccessConvertUtil.parseLog, builds a DataFrame with the
   * schema declared in AccessConvertUtil.struct, registers it as a temp view
   * ("scheduling") and runs a sample filter query, printing results along the
   * way for debugging.
   */
  def main(args: Array[String]): Unit = 
  {
    // Local session with 2 threads; NOTE(review): for a production job the
    // master should come from spark-submit rather than being hard-coded.
    val spark = SparkSession.builder().appName("SparkStatCleanJob")
      .master("local[2]").getOrCreate()

    val accessRDD = spark.sparkContext.textFile("access.log")

    println("-----------------------------------下面是各种类型检查-------------------------------------------------------------")

    // RDD ==> DataFrame: map each raw line to a Row, then attach the schema.
    // Use string interpolation instead of println(a, b), which prints a Tuple2.
    println(s"type of accessRDD=${accessRDD.getClass.getSimpleName}") // MapPartitionsRDD

    val rowRDD = accessRDD.map(line => AccessConvertUtil.parseLog(line))

    // collect() yields an Array[Row]; mkString prints the elements instead of
    // the array's default toString (which is just a JVM type+hashcode).
    // NOTE(review): collect() pulls the whole dataset to the driver — fine for
    // a debug run on a small log, unsafe for large inputs.
    println("rowRDD.collect()=" + rowRDD.collect().mkString("[", ", ", "]"))
    println(s"type of rowRDD=${rowRDD.getClass.getSimpleName}") // MapPartitionsRDD

    val schema = AccessConvertUtil.struct
    println(s"type of AccessConvertUtil.struct=${schema.getClass.getSimpleName}") // StructType$

    val accessDF = spark.createDataFrame(rowRDD, schema)
    accessDF.show()

    println("-----------------------------------下面注册临时表-------------------------------------------------------------")

    // Register as a temp view so plain SQL can be run against it.
    accessDF.createOrReplaceTempView("scheduling")
    val sqlDF = spark.sql("SELECT * FROM scheduling where traffic = 'bus'")
    sqlDF.show()

    spark.stop()
  }

}


// Key reference link consulted while modifying this code:
// https://blog.csdn.net/wt346326775/article/details/72871567/


