package com.shujia.mllib

import org.apache.spark.ml.linalg.{SparseVector, Vectors}
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

object DEmo5ImageRead {

  /**
    * Feature engineering for handwritten-digit images.
    *
    * Reads raw images via Spark's "image" data source, binarizes each pixel,
    * joins the per-file label table, and writes (label, features) rows out
    * in libsvm format for downstream ML jobs.
    */
  def main(args: Array[String]): Unit = {

    val spark: SparkSession = SparkSession.builder()
      .appName("kmeans")
      .master("local[8]")
      .config("spark.sql.shuffle.partitions", "2")
      .getOrCreate()

    import spark.implicits._
    import org.apache.spark.sql.functions._

    /**
      * Schema produced by the "image" data source:
      * |-- image: struct (nullable = true)
      * |    |-- origin: string    - source file path/URI
      * |    |-- height: integer   - image height
      * |    |-- width: integer    - image width
      * |    |-- nChannels: integer
      * |    |-- mode: integer
      * |    |-- data: binary      - raw pixel bytes
      */
    val imageData: DataFrame = spark
      .read
      .format("image")
      .load("D:\\课件\\机器学习数据\\手写数字\\train")
      .repartition(8)

    imageData.printSchema()

    // Keep only the file path and the raw pixel bytes.
    val data: DataFrame = imageData.select($"image.origin" as "name", $"image.data" as "data")

    val nameAndFeatures: DataFrame = data
      // Convert the DataFrame to a typed Dataset so we can use map.
      .as[(String, Array[Byte])]
      .map { case (origin, bytes) =>
        // File name without the directory part (origin is a URI, '/'-separated).
        val name: String = origin.split("/").last

        // Binarize each pixel: a signed byte < 0 corresponds to an unsigned
        // pixel value > 127 ("ink"), which we map to 1.0; everything else 0.0.
        val pixels: Array[Double] = bytes.map(b => if (b < 0) 1.0 else 0.0)

        // Store as a sparse vector: most pixels are 0 and the libsvm output
        // format is sparse anyway. (The original built a dense vector despite
        // the comment — .toSparse makes code and intent agree.)
        val features: SparseVector = Vectors.dense(pixels).toSparse

        (name, features)
      }.toDF("name", "features")

    // Read the label file: one "<file name> <label>" pair per line.
    val labelData: DataFrame = spark.read
      .format("csv")
      .option("sep", " ")
      .schema("name STRING,label DOUBLE")
      .load("D:\\课件\\机器学习数据\\手写数字\\train.txt")

    // Join features with labels; the label table is small, so broadcast it.
    val resultData: DataFrame = nameAndFeatures
      .join(labelData.hint("broadcast"), Seq("name"), "inner")
      .select("label", "features")

    // Persist as libsvm so ML pipelines can load the data directly.
    resultData
      .write
      .mode(SaveMode.Overwrite)
      .format("libsvm")
      .save("spark/data/images")

    // Release local Spark resources.
    spark.stop()
  }

}
