import org.apache.spark.sql.types.{DataTypes, StructField, StructType}
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
 * Demo of three ways to read a CSV into a Spark DataFrame:
 *  1. raw read (all columns inferred as strings, default names _c0.._c3),
 *  2. read with an explicit StructType schema,
 *  3. read with header row enabled, then rename columns via toDF.
 *
 * Side effects: prints each DataFrame's schema and contents to stdout.
 */
object data1_SQL2 {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder
      .master("local[*]")
      .appName("Spark Pi")
      .getOrCreate()

    // Path to the sample CSV used by reads 1 and 2.
    val csvPath = "F:\\Spark\\karry\\karry\\src\\main\\resources\\data.csv"

    // 1. Raw read: no schema, no header — columns come back as _c0.._c3, all StringType.
    val dataSet1: DataFrame = spark.read.csv(csvPath)
    dataSet1.printSchema()
    dataSet1.show()

    // 2. Explicit schema: declare column names and types up front.
    val scheme1 = StructType(Seq(
      StructField("id", DataTypes.IntegerType),
      StructField("name", DataTypes.StringType),
      StructField("course", DataTypes.StringType),
      StructField("score", DataTypes.IntegerType)
    ))
    // BUG FIX: the original passed an empty path "" here, which fails at runtime;
    // re-read the same data file with the explicit schema applied.
    val dataSet2 = spark.read.schema(scheme1).csv(csvPath)
    dataSet2.printSchema()
    dataSet2.show()

    // 3. Header-based read: first row of the file supplies column names.
    val dataSet3 = spark.read.option("header", "true").csv("F:\\Spark\\karry\\karry\\src\\main\\resources\\data.txt")
    dataSet3.printSchema()
    dataSet3.show()

    // Rename columns (e.g. translate Chinese headers to English aliases).
    val dataSet4 = dataSet3.toDF("st_id", "st_name", "st_course", "st_score")
    dataSet4.printSchema()
    dataSet4.show()

    // Release the session's resources (the original leaked it).
    spark.stop()
  }
}
