package spark.so

import com.inf.featureCompare.FeatureCompare
import com.sun.jersey.core.util.Base64
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession
/**
  * Created by zhangbn on 2018/10/10.
  */
object SoComp {

  /**
    * Computes pairwise face-feature similarities for a sample of pass records
    * and appends the results to `brsface.face_similarity`.
    *
    * Fixes over the previous version:
    *   - the per-row `spark.sqlContext.sql("insert ...")` inside `rdd.foreach`
    *     ran on executors, where the driver-side SparkSession is not usable
    *     (NullPointerException at runtime); similarities are now computed as a
    *     DataFrame and written once from the driver.
    *   - string values in the generated INSERT were unquoted — broken SQL for
    *     any real pass_id and an injection risk; no SQL is string-built anymore.
    *   - one INSERT per pair produced one tiny file per statement; replaced by
    *     a single batched append.
    */
  def main(args: Array[String]): Unit = {

    val sparkConf = new SparkConf()
      .setAppName("SoComp")

    // The all-pairs comparison is effectively a cross join.
    sparkConf.set("spark.sql.crossJoin.enabled", "true")

    val spark = SparkSession
      .builder()
      .config(sparkConf)
      .enableHiveSupport()
      .getOrCreate()
    import spark.implicits._

    spark.sql("use brsface")
    spark.sql(
      """
        |create table IF NOT EXISTS brsface.face_similarity(
        |passid_1 string,
        |passid_2 string,
        |similarity double
        |)STORED AS parquet TBLPROPERTIES('parquet.compression'='SNAPPY')
      """.stripMargin)

    val oriDF = spark.sql("select pass_id,feature from t_person_passinfo where dt='180617' limit 3")

    // The reference set is tiny here (limit 3), so collecting to the driver and
    // broadcasting is safe.
    // NOTE(review): if the `limit 3` is removed for production, re-check that
    // the collected feature list still fits in driver memory.
    val featureList = oriDF.select("feature")
      .map(row => Base64.decode(row.getAs[String](0)))
      .collectAsList()

    val passIdList = oriDF.select("pass_id")
      .map(row => row.getAs[String](0))
      .collectAsList()

    val featureListBC = spark.sparkContext.broadcast(featureList)
    val passIdListBC = spark.sparkContext.broadcast(passIdList)

    // Compute similarities on executors and return rows, instead of trying to
    // issue SQL from inside an executor closure.
    val resultDF = oriDF.flatMap { row =>
      val feature = Base64.decode(row.getAs[String]("feature"))
      val scores = new FeatureCompare().featureCompByByte(feature, featureListBC.value)
      val passId1 = row.getAs[String]("pass_id")
      scores.indices.map(i => (passId1, passIdListBC.value.get(i), scores(i)))
    }.toDF("passid_1", "passid_2", "similarity")

    // Single batched append into the Hive table; column order matches the DDL.
    resultDF.write.mode("append").insertInto("brsface.face_similarity")

    spark.stop()
  }

}
