package com.itcast.spark.basePro

import org.apache.spark.mllib.linalg
import org.apache.spark.mllib.linalg.{Matrix, Vectors}
import org.apache.spark.mllib.stat.{MultivariateStatisticalSummary, Statistics}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.types.{DoubleType, StructType}
import org.apache.spark.sql.{DataFrame, Row, SparkSession}
import org.apache.spark.{SparkConf, SparkContext}

/**
 * DESC:
 */
/**
 * Reads the headerless Iris CSV into an RDD[Row], attaches an explicit
 * schema (4 double feature columns + 1 string label column), builds a
 * DataFrame, and prints its contents and schema.
 *
 * Expects ./datasets/mldata/irisNoHeader.csv with rows of the form:
 *   sepal_length,sepal_width,petal_length,petal_width,class
 */
object _03IrisSparkSQlReader {
  def main(args: Array[String]): Unit = {
    // App name matches this object (was copy-pasted from _01IrisVarience).
    val conf: SparkConf = new SparkConf().setAppName("_03IrisSparkSQlReader").setMaster("local[*]")
    val spark: SparkSession = SparkSession.builder().config(conf).getOrCreate()
    val sc: SparkContext = spark.sparkContext
    sc.setLogLevel("WARN")
    // Load the raw CSV and convert each line into a Row of typed values.
    val data: RDD[Row] = sc.textFile("./datasets/mldata/irisNoHeader.csv")
      .map(_.split(","))
      .map(x => Row(x(0).toDouble, x(1).toDouble, x(2).toDouble, x(3).toDouble, x(4)))
    // Schema: sepal_length, sepal_width, petal_length, petal_width, class.
    // Use typed constants consistently (the original mixed "double" strings
    // with DoubleType on adjacent lines).
    val schema: StructType = new StructType()
      .add("sepal_length", DoubleType, true)
      .add("sepal_width", DoubleType, true)
      .add("petal_length", DoubleType, true)
      .add("petal_width", DoubleType, true)
      .add("class", StringType, true)
    val dataDF: DataFrame = spark.createDataFrame(data, schema)
    dataDF.show()
    dataDF.printSchema()
    // Release the session explicitly instead of leaking it until JVM exit.
    spark.stop()
  }
}
