package com.bishe.cyh.spark

import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.client.Result
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
import org.apache.hadoop.hbase.mapreduce.TableInputFormat
import org.apache.hadoop.hbase.util.Bytes
import org.apache.spark.{SparkConf, SparkContext}

import java.sql.DriverManager



/**
 *
 * @Author: Poppin
 * @Date: 2022/2/28 20:26
 * @Version: 1.0
 */
object SparkTask {

  /**
   * Builds an HBase scan configuration pointing at `table`, using the fixed
   * Zookeeper quorum this job runs against. Extracted because the same three
   * settings were previously copy-pasted for each of the three tables read.
   */
  private def newHBaseConf(table: String): org.apache.hadoop.conf.Configuration = {
    val conf = HBaseConfiguration.create()
    conf.set("hbase.zookeeper.quorum", "192.168.66.157")
    conf.set("hbase.zookeeper.property.clientPort", "2181")
    conf.set(TableInputFormat.INPUT_TABLE, table)
    conf
  }

  /**
   * Runs the full face-recognition job identified by `Snowid`:
   *
   *  1. Reads every image under `hdfs://192.168.66.157:9000/<file>/` as binary
   *     and pushes each one through the ArcFace detector (`ArcFace.JC`).
   *  2. Scans HBase table `<Snowid>re` (column IS:sift) and compares each stored
   *     feature against the reference feature (`ArcFace.Computer`).
   *  3. Scans HBase table `<Snowid>data` (columns Data:gender, Data:age) and
   *     computes gender counts and age-bucket counts.
   *  4. Counts rows of HBase table `<Snowid>NN`, writes one summary row to the
   *     MySQL table `datajieguy`, then drops the job's HBase tables.
   *
   * @param file   HDFS directory (relative to the namenode root) holding the input images.
   * @param float  similarity threshold forwarded to `ArcFace.init`.
   * @param Snowid job id; primary key of the MySQL result row and prefix of the HBase tables.
   */
  def Task(file: String, float: Float, Snowid: String): Unit = {
    val conf = new SparkConf().setAppName("SparkTask").setMaster("local[*]")
    val context = new SparkContext(conf)
    // Stop the context even if any stage below throws, so the local executor
    // threads and UI port are always released.
    try {
      /**
       * Phase 1: face detection over the raw input images.
       */
      val value = context.binaryFiles("hdfs://192.168.66.157:9000/" + file + "/*")
      val countImage = value.count().toInt
      value.foreachPartition {
        iter => {
          // ArcFace holds native (JNI) state, so it is initialised once per
          // partition on the executor rather than on the driver.
          ArcFace.init(Snowid, float)
          iter.foreach {
            valueFile => {
              // valueFile._1 is the full HDFS path; keep only the file name.
              val strings = valueFile._1.split("/")
              val length = strings.length
              val str = strings(length - 1)
              val bytes = valueFile._2.toArray()
              ArcFace.JC(bytes, str)
            }
          }
        }
      }

      /**
       * Phase 2: compare stored SIFT/feature blobs from <Snowid>re against
       * the reference face feature.
       */
      val tableName = Snowid + "re"
      val HBaseRDD = context.newAPIHadoopRDD(newHBaseConf(tableName), classOf[TableInputFormat], classOf[ImmutableBytesWritable], classOf[Result])
      val rdd1 = HBaseRDD.map(rdd => {
        val value1 = rdd._2
        val str = Bytes.toString(value1.getRow)
        val cell = value1.getValue(Bytes.toBytes("IS"), Bytes.toBytes("sift"))
        (str, cell)
      })
      rdd1.foreachPartition {
        iter => {
          ArcFace.GetFaceFeature()
          iter.foreach {
            valueFial => {
              ArcFace.Computer(valueFial._2, valueFial._1)
            }
          }
          // Release the native ArcFace resources for this partition.
          ArcFace.unit()
        }
      }

      /**
       * Phase 3: gender / age statistics from <Snowid>data.
       */
      val tableName1 = Snowid + "data"
      val HBaseRDD1 = context.newAPIHadoopRDD(newHBaseConf(tableName1), classOf[TableInputFormat], classOf[ImmutableBytesWritable], classOf[Result])
      val rdd3 = HBaseRDD1.map(rdd => {
        val value = rdd._2
        val Gender = Bytes.toString(value.getValue(Bytes.toBytes("Data"), Bytes.toBytes("gender"))).toInt
        val Age = Bytes.toString(value.getValue(Bytes.toBytes("Data"), Bytes.toBytes("age"))).toInt
        (Gender, Age)
      })
      // 13 separate count() actions follow; without caching, each one would
      // rescan the HBase table from scratch.
      rdd3.cache()

      // Gender encoding: 0 = male, 1 = female, -1 = detector could not tell.
      val male = rdd3.filter(_._1 == 0).count().toInt
      val female = rdd3.filter(_._1 == 1).count().toInt
      val unknown = rdd3.filter(_._1 == -1).count().toInt

      // Age buckets. -1 means "age unknown". NOTE(review): age == 0 falls into
      // no bucket (first bucket is exclusive at 0) — confirm whether age 0 can
      // occur and which bucket it belongs to.
      val rdd4 = rdd3.filter(_._2 == -1).count().toInt
      val rdd0_10 = rdd3.filter(r => r._2 > 0 && r._2 <= 10).count().toInt
      val rdd11_20 = rdd3.filter(r => r._2 > 10 && r._2 <= 20).count().toInt
      val rdd21_30 = rdd3.filter(r => r._2 > 20 && r._2 <= 30).count().toInt
      val rdd31_40 = rdd3.filter(r => r._2 > 30 && r._2 <= 40).count().toInt
      val rdd41_50 = rdd3.filter(r => r._2 > 40 && r._2 <= 50).count().toInt
      val rdd51_60 = rdd3.filter(r => r._2 > 50 && r._2 <= 60).count().toInt
      val rdd61_70 = rdd3.filter(r => r._2 > 60 && r._2 <= 70).count().toInt
      val rdd70_80 = rdd3.filter(r => r._2 > 70 && r._2 <= 80).count().toInt
      val rdd80 = rdd3.filter(_._2 > 80).count().toInt
      rdd3.unpersist()

      /**
       * Phase 4: row count of <Snowid>NN, then persist the summary to MySQL.
       */
      val tablename1 = Snowid + "NN"
      val HBaseRDD2 = context.newAPIHadoopRDD(newHBaseConf(tablename1), classOf[TableInputFormat], classOf[ImmutableBytesWritable], classOf[Result])
      val NN = HBaseRDD2.count().toInt

      // NOTE(review): credentials are hard-coded; move them to configuration.
      val connection = DriverManager.getConnection("jdbc:mysql://11991199.xyz:3506/bysj?useUnicode=true&characterEncoding=utf-8&useSSL=false", "root", "asdfghjkl")
      try {
        val sql = "insert into datajieguy(id,imagecount,male,female,unknown,rdd4,rdd10,rdd20,rdd30,rdd40,rdd50,rdd60,rdd70,rdd80,rdd81,nn) value(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)"
        val ps = connection.prepareStatement(sql)
        try {
          ps.setString(1, Snowid)
          ps.setInt(2, countImage)
          ps.setInt(3, male)
          ps.setInt(4, female)
          ps.setInt(5, unknown)
          ps.setInt(6, rdd4)
          ps.setInt(7, rdd0_10)
          ps.setInt(8, rdd11_20)
          ps.setInt(9, rdd21_30)
          ps.setInt(10, rdd31_40)
          ps.setInt(11, rdd41_50)
          ps.setInt(12, rdd51_60)
          ps.setInt(13, rdd61_70)
          ps.setInt(14, rdd70_80)
          ps.setInt(15, rdd80)
          ps.setInt(16, NN)
          ps.addBatch()
          ps.executeBatch()
        } finally {
          ps.close()
        }
      } finally {
        // Previously leaked if anything above threw.
        connection.close()
      }
      HBaseClient.deleteTable(Snowid)
    } finally {
      // Previously only reached on the happy path.
      context.stop()
    }
  }

}
