package spark.so

import java.util

import com.sun.jersey.core.util.Base64
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession
/**
  * Created by zhangbn on 2018/10/10.
  */
/**
  * Local-mode Spark driver that reads Base64-encoded face feature vectors
  * from Hive (ods.t_person_passinfo) and is intended to persist pairwise
  * similarity scores into ods.face_similarity.
  *
  * Created by zhangbn on 2018/10/10.
  */
object SoCompLocal {
  def main(args: Array[String]): Unit = {

    // Windows-local Hadoop shim location and the Hadoop user the job runs as.
    System.setProperty("hadoop.home.dir", "E:\\hadoop-common-2.7.3-bin-master")
    System.setProperty("HADOOP_USER_NAME", "admin")

    val sparkConf = new SparkConf()
      .setAppName("SoComp")
      .setMaster("local[*]")

    // The all-pairs similarity computation needs a cartesian join, which
    // Spark SQL disables by default.
    sparkConf.set("spark.sql.crossJoin.enabled", "true")

    val spark = SparkSession
      .builder()
      .config(sparkConf)
      .enableHiveSupport()
      .getOrCreate()

    try {
      spark.sqlContext.sql("use ods")
      // Target table for the pairwise similarity output.
      spark.sqlContext.sql(
        """
          |create table IF NOT EXISTS ods.face_similarity(
          |passid_1 string,
          |passid_2 string,
          |similarity double
          |)STORED AS parquet TBLPROPERTIES('parquet.compression'='SNAPPY')
        """.stripMargin)
      import spark.implicits._

      // `limit 3` keeps this local smoke test tiny; remove it for a real run.
      val oriDF = spark.sqlContext.sql("select pass_id,feature,quality_score from t_person_passinfo limit 3")
      // The frame is scanned once per collected column plus once for the
      // comparison pass below — cache to avoid re-reading Hive each time.
      oriDF.cache()

      // Features are stored Base64-encoded; decode to raw bytes up front.
      // Read the column value explicitly — Row.toString() would include brackets.
      val featureList = oriDF.select("feature")
        .map(row => Base64.decode(row.getString(0)))
        .collectAsList()
      val passIdList = oriDF.select("pass_id")
        .map(row => row.getString(0))
        .collectAsList()
      // quality_score is persisted on a 0-100 scale; normalise to [0, 1].
      val scoreList = oriDF.select("quality_score")
        .map(row => row.getDouble(0) / 100)
        .collectAsList()

      // Ship the collected reference data to every executor once.
      val f_BC = spark.sparkContext.broadcast(featureList)
      val p_BC = spark.sparkContext.broadcast(passIdList)
      val s_BC = spark.sparkContext.broadcast(scoreList)

      println(featureList)
      println(passIdList)
      println(scoreList)

      // BUG FIX: the original used `rdd.map`, a lazy transformation whose
      // result was discarded, so the comparison never executed. `foreach`
      // is an action and runs eagerly. Also fixed: the original decoded
      // `f.toString()`, which renders a Row as "[value]" and would have
      // corrupted the Base64 input; read the column value instead.
      oriDF.select("feature").rdd.foreach(row => {
        println(featureCompByByte(Base64.decode(row.getString(0)), f_BC.value))
      })

      // Release broadcast state once the comparison pass has run.
      f_BC.destroy()
      p_BC.destroy()
      s_BC.destroy()
    } finally {
      // Always tear the session down, even if a stage above fails.
      spark.close()
    }
  }

  /**
    * Compares one decoded feature vector against a list of candidate vectors.
    *
    * NOTE(review): stub implementation — always returns three fixed scores
    * regardless of input. The real byte-level comparison (cf. the
    * `FeatureCompare` class referenced in earlier drafts) still needs to be
    * wired in, and the result should be written to ods.face_similarity.
    *
    * @param var1 raw feature bytes of the probe face
    * @param var2 raw feature bytes of each candidate face
    * @return one similarity score per candidate (currently hard-coded)
    */
  def featureCompByByte(var1: Array[Byte], var2: util.List[Array[Byte]]): Array[Double] = {

    Array(0.75, 0.85, 0.95)
  }

}
