package spark

import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.sql.types.{StringType, StructField, StructType}
import org.apache.spark.sql.{Row, SparkSession}
import spark.DataFrameDemo.{hivePath, localPath}

/**
  *
  *
  * @author pinker on 2018/3/16
  */
/**
  * Demo: building DataFrames/Datasets from an RDD[Row] + explicit schema,
  * and from a local collection via `spark.implicits`.
  */
object UDAFDemo {

  /** Entry point: creates a local SparkSession, runs the demo, then stops it. */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .master("local[5]")
      .appName("rdd")
      .config("spark.local.dir", localPath)
      .config("spark.sql.warehouse.dir", hivePath)
      .getOrCreate()
    try {
      //    countElementByGroup(spark)
      createDataFrameDemo(spark)
    } finally {
      // Always release local Spark resources, even if a demo throws.
      spark.stop()
    }
  }

  /** Simple two-column record (currently unused by the demos below). */
  case class Test(col11: String, col12: String)

  /**
    * Reads a two-column CSV file into a DataFrame via an RDD[Row] and an
    * explicit schema, then prints it.
    *
    * Malformed lines (fewer than two comma-separated fields) are skipped
    * instead of throwing ArrayIndexOutOfBoundsException at execution time.
    *
    * @param sparkSession active session used to read the file and build the DataFrame
    */
  def countElementByGroup(sparkSession: SparkSession): Unit = {
    val rdd = sparkSession.sparkContext.textFile("src/main/resources/data/udaf.csv")
    val rows = rdd
      .map(_.split(",", -1)) // limit -1 keeps trailing empty fields
      .filter(_.length >= 2) // guard: skip lines without both columns
      .map(arr => Row(arr(0), arr(1)))
    val schema = StructType(
      StructField("col1", StringType, nullable = true) ::
        StructField("col2", StringType, nullable = true) :: Nil)
    val df = sparkSession.createDataFrame(rows, schema)
    df.show()
  }

  /**
    * Builds a small Dataset of (id, feature vector, label) tuples from a
    * local collection and prints it both as a DataFrame with named columns
    * and as the raw typed Dataset.
    *
    * NOTE(review): this uses `mllib.linalg.Vectors`; the DataFrame-based ML
    * API normally uses `org.apache.spark.ml.linalg` — confirm which is intended.
    *
    * @param spark active session providing `implicits` for the tuple encoder
    */
  def createDataFrameDemo(spark: SparkSession): Unit = {
    val data = Seq(
      (7, Vectors.dense(0.0, 0.0, 18.0, 1.0), 1.0),
      (8, Vectors.dense(0.0, 1.0, 12.0, 0.0), 0.0),
      (9, Vectors.dense(1.0, 0.0, 15.0, 0.1), 0.0)
    )
    import spark.implicits._
    val ds = spark.createDataset(data)
    val df = ds.toDF("id", "features", "clicked")
    df.show()
    ds.show()
  }
}
