package com.fwmagic.spark.ml.utils

import org.apache.spark.ml.linalg.Vectors
import org.apache.spark.sql.functions.udf

import scala.collection.mutable

/**
 * Feature vectorization helpers (array-to-vector conversions).
 */
object VectorUtils {

  /** Default bucket count used by the hashing-trick vectorizer ([[arr2vec2]]). */
  private val DefaultNumFeatures = 20

  /**
   * Converts an array of doubles into a dense ML vector, element for element.
   *
   * Plain function form; see [[arr2vec3]] for the UDF-wrapped equivalent.
   */
  val arr2vec = (arr: mutable.WrappedArray[Double]) => {
    Vectors.dense(arr.toArray)
  }

  /**
   * Binary hashing-trick vectorizer: each string is hashed into one of
   * [[DefaultNumFeatures]] buckets and the corresponding slot is set to 1.0.
   *
   * Collisions simply re-set the same slot to 1.0, so the result encodes
   * presence only, not counts.
   */
  val arr2vec2 = (arr: mutable.WrappedArray[String]) => {
    hashToDense(arr, DefaultNumFeatures)
  }

  /**
   * Generalized form of [[arr2vec2]] with a configurable dimensionality.
   *
   * @param arr         elements to hash into the feature vector
   * @param numFeatures size of the output vector; must be positive
   * @return dense vector with 1.0 at every bucket hit by at least one element
   */
  def hashToDense(arr: Seq[String], numFeatures: Int = DefaultNumFeatures) = {
    require(numFeatures > 0, s"numFeatures must be positive, got $numFeatures")
    val features = new Array[Double](numFeatures)
    arr.foreach { e =>
      // Mask the sign bit so the index is always non-negative.
      val index = (e.hashCode() & Integer.MAX_VALUE) % numFeatures
      features(index) = 1.0
    }
    Vectors.dense(features)
  }

  /**
   * UDF-wrapped version of [[arr2vec]], usable directly in DataFrame
   * expressions (e.g. `df.withColumn("vec", arr2vec3(col("arr")))`).
   */
  val arr2vec3 = udf((arr: mutable.WrappedArray[Double]) => {
    Vectors.dense(arr.toArray)
  })
}
