package cn.doitedu.day07

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.types.{StringType, StructField, StructType}
import org.apache.spark.sql.{DataFrame, Row, SparkSession}

import scala.beans.BeanProperty

object T04_DataFrameDemo3 {

  /**
   * Demo: build a DataFrame from an RDD of JavaBean-style objects.
   * Reads CSV lines ("uid,date") from data/login.txt, maps each line to a
   * [[LogBean2]], and lets Spark derive the schema via JavaBean reflection.
   */
  def main(args: Array[String]): Unit = {

    // SparkSession is the enhanced entry point that wraps a SparkContext.
    val spark: SparkSession = SparkSession.builder().appName("SQLWordCount")
      .master("local[4]")
      .getOrCreate()

    // A DataFrame is also an abstract dataset: RDD + Schema.

    val lines: RDD[String] = spark.sparkContext.textFile("data/login.txt")

    // Parse each comma-separated line into a bean (field 0 = uid, field 1 = date).
    val beanRdd: RDD[LogBean2] = lines.map(line => {
      val fields = line.split(",")
      val uid = fields(0)
      val dt = fields(1)
      new LogBean2(uid, dt)
    })

    // Schema is inferred from LogBean2's JavaBean getters.
    val df: DataFrame = spark.createDataFrame(beanRdd, classOf[LogBean2])

    df.show()

    // Release the session's resources before the JVM exits (was missing).
    spark.stop()
  }

}

/**
 * JavaBean-style holder for one login record, consumed by
 * `SparkSession.createDataFrame(rdd, classOf[LogBean2])`, which derives
 * DataFrame columns from JavaBean getters.
 *
 * Both fields are annotated with `@BeanProperty` so each gets a generated
 * `getX()` getter; the original hand-written `getId()` was redundant and
 * inconsistent with the `date` field (generated `getId()` has the same
 * signature, so existing callers are unaffected).
 *
 * @param id   user id (first CSV field of a login line)
 * @param date login date string (second CSV field of a login line)
 */
class LogBean2(
  @BeanProperty
  val id: String,
  @BeanProperty
  val date: String
)