package com.txl.cn.spark05

import org.apache.avro.generic.GenericData.StringType
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}

/**
  * Created by txl on 2018/1/2.
  */
object DataSetDemo {

  /**
    * Word-count over a text file using the Spark Dataset API:
    * read lines, split on spaces, group identical words and print counts.
    *
    * @param args optional; args(0) is the input file path,
    *             defaulting to "wc.txt" (the original hard-coded path)
    */
  def main(args: Array[String]): Unit = {
    // Local-mode session for this small demo job.
    val session: SparkSession = SparkSession.builder()
      .master("local")
      .appName("SetDemo")
      .getOrCreate()

    try {
      // Input path is now parameterizable; default preserves old behavior.
      val path = if (args.nonEmpty) args(0) else "wc.txt"
      val lines = session.read.textFile(path)

      // Brings the implicit Encoder[String] needed by flatMap into scope.
      import session.implicits._
      val words = lines.flatMap(_.split(" "))

      // A Dataset[String] exposes its content as the single column "value",
      // so grouping directly on it yields per-word counts.
      words.groupBy("value")
        .count()
        .show()
    } finally {
      // Always release the underlying SparkContext, even on failure.
      // (The original only closed the session in commented-out code.)
      session.close()
    }
  }

}
