import org.apache.spark.sql.types.{DataTypes, StructField}
import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}

/**
 * Demonstrates three ways of reading the same traffic data file with Spark:
 * the low-level RDD API, an untyped DataFrame, and a typed Dataset[String].
 *
 * The input path may be supplied as the first command-line argument;
 * otherwise the hard-coded default below is used.
 */
object data1_traffic {

  /** Fallback input location when no CLI argument is given. */
  private val DefaultPath = "E:\\Spark\\karry\\karry\\src\\main\\resources\\traffic-data.txt"

  def main(args: Array[String]): Unit = {
    // Generalized: take the path from args(0) when present, else keep the old default.
    val path = args.headOption.getOrElse(DefaultPath)

    val spark = SparkSession
      .builder
      .master("local[*]")
      .appName("TrafficDataReader") // was "Spark Pi" — leftover from an example, misleading in the Spark UI
      .getOrCreate()
    val sc = spark.sparkContext

    // Read the traffic data three different ways.

    // 1) RDD API: each element is one raw line of text.
    val rdd1 = sc.textFile(path)
    rdd1.take(5).foreach(println)
    println(rdd1.count())

    // Explicit schema kept for a future structured read, e.g. spark.read.schema(scheme1).csv(path).
    // NOTE(fix): the outer wrapper must be StructType, not StructField, or this will not compile.
//    val scheme1 = StructType(Seq(
//      StructField("jcID", DataTypes.StringType),
//      StructField("jkID", DataTypes.StringType),
//      StructField("carID", DataTypes.StringType),
//      StructField("time", DataTypes.StringType),
//      StructField("speed", DataTypes.StringType),
//      StructField("luID", DataTypes.StringType),
//      StructField("quID", DataTypes.StringType)
//    ))

    // 2) DataFrame: untyped rows with a single "value": String column per line.
    val df: DataFrame = spark.read.text(path)
    // 3) Dataset[String]: typed view of the same lines (uses Spark's built-in String encoder,
    //    so the previously present `import spark.implicits._` was unused and has been removed).
    val ds: Dataset[String] = spark.read.textFile(path)

    df.printSchema()
    df.show(3)
    ds.printSchema()
    ds.show(2)

    // Stopping the session also stops the underlying SparkContext.
    spark.stop()
  }
}
