package com.demo.study

import org.apache.spark.sql.SparkSession

object RDD_DF_DS {

  /**
   * Row schema for the input text file: one record per line, fields separated
   * by whitespace as "id name language".
   *
   * NOTE: case classes are Serializable by default, so no explicit
   * `extends Serializable` is needed.
   */
  final case class Coltest(id: Int, name: String, language: String)

  /**
   * Demonstrates the three Spark abstractions — RDD, DataFrame, Dataset —
   * built from the same whitespace-delimited text file.
   *
   * @param args optional overrides:
   *             args(0) = input file path    (default: E:\hello.txt)
   *             args(1) = hadoop.home.dir    (default: E:\hadoop-common-2.2.0-bin-master)
   */
  def main(args: Array[String]): Unit = {

    // Windows-only requirement: winutils.exe must live under hadoop.home.dir.
    val hadoopHome = args.lift(1).getOrElse("E:\\hadoop-common-2.2.0-bin-master")
    System.setProperty("hadoop.home.dir", hadoopHome)

    val spark = SparkSession
      .builder()
      .appName("PassinfoMetric_01")
      .master("local[1]")
      .enableHiveSupport() // kept for parity with the original; requires Hive classes on the classpath
      .getOrCreate()
    import spark.implicits._

    val inputPath = args.headOption.getOrElse("E:\\hello.txt")

    // RDD of (id, name, language) tuples. `\\s+` tolerates repeated spaces,
    // and the partial function skips malformed lines (fewer than 3 tokens or
    // a non-numeric id) instead of crashing the whole job on arr(i)/.toInt.
    val rdd = spark.sparkContext
      .textFile(inputPath)
      .map(_.split("\\s+"))
      .collect { case Array(id, name, language, _*) if id.nonEmpty && id.forall(_.isDigit) =>
        (id.toInt, name, language)
      }

    // RDD -> DataFrame: column names supplied explicitly via toDF.
    val rddDF = rdd.toDF("id", "name", "language")
    println("DataFrame:") // println (not print) so the label gets its own line before show()
    rddDF.show()

    // RDD -> Dataset: typed through the Coltest case class.
    val rddDS = rdd.map { case (id, name, language) => Coltest(id, name, language) }.toDS()
    println("Dataset:") // label the Dataset output too, consistent with the DataFrame above
    rddDS.show()

    spark.stop()
  }
}
