package com.shujia.ml

import org.apache.spark.ml.linalg.Vectors
import org.apache.spark.sql.expressions.UserDefinedFunction
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

object Demo5ImageLoad {

  /**
   * Feature-engineering pipeline for image classification data:
   *   1. load raw images via Spark's "image" data source,
   *   2. binarize each pixel byte into a sparse feature vector,
   *   3. join the features with their labels (by file name),
   *   4. persist the result in libsvm format for downstream training.
   *
   * Optional command-line args: [imageDir] [labelFile] [outputDir].
   * Unspecified args fall back to the original hard-coded paths, so
   * existing invocations behave exactly as before.
   */
  def main(args: Array[String]): Unit = {

    // Paths are overridable from the command line; defaults keep the
    // original behavior for callers that pass no arguments.
    val imageDir: String  = args.lift(0).getOrElse("E:\\data\\train")
    val labelFile: String = args.lift(1).getOrElse("spark/data/image_res.txt")
    val outputDir: String = args.lift(2).getOrElse("spark/data/image_data")

    val spark: SparkSession = SparkSession
      .builder()
      .master("local[8]")
      .appName("point")
      .getOrCreate()

    import spark.implicits._
    import org.apache.spark.sql.functions._

    // Ensure the session is stopped even if the pipeline fails, so the
    // local executor threads and UI resources are released.
    try {
      // 1. Read the raw image data (schema: image.origin, image.data, ...).
      val imageData: DataFrame = spark.read
        .format("image")
        .load(imageDir)

      imageData.printSchema()

      // UDF: binarize the raw pixel bytes into a sparse feature vector.
      // JVM bytes are signed, so a raw pixel intensity > 127 arrives as a
      // negative byte: those (dark/ink) pixels map to 1.0, the rest to 0.0.
      // Single pass instead of two chained .map calls over the array.
      val dataToVector: UserDefinedFunction = udf((data: Array[Byte]) => {
        val binarized: Array[Double] = data.map(b => if (b < 0) 1.0 else 0.0)
        // Most pixels are 0 after binarization, so the sparse form saves space.
        Vectors.dense(binarized).toSparse
      })

      // UDF: extract the file name from the image origin URI.
      // The image source reports origins as URIs with '/' separators,
      // even when the underlying filesystem is Windows.
      val pathToName: UserDefinedFunction = udf((path: String) => path.split("/").last)

      // 2. Turn the raw columns into (name, features).
      val featuresData: DataFrame = imageData
        .select($"image.origin" as "origin", $"image.data" as "data")
        .select(pathToName($"origin") as "name", dataToVector($"data") as "features")

      // 3. Read the labels: one "name label" pair per line, space-separated.
      val labelData: DataFrame = spark
        .read
        .schema("name STRING,label DOUBLE")
        .option("sep", " ")
        .csv(labelFile)

      val data: DataFrame = featuresData
        // The label table is small: broadcast it to avoid a shuffle join.
        .join(labelData.hint("broadcast"), "name")
        .select($"label", $"features")

      // 4. Persist the prepared (label, features) rows.
      data
        // Reduce the partition count to avoid producing many tiny files.
        .coalesce(10)
        .write
        .format("libsvm")
        .mode(SaveMode.Overwrite)
        .save(outputDir)
    } finally {
      spark.stop()
    }
  }
}
