package org.qnit

import org.apache.spark.sql.{SaveMode, SparkSession}
import org.qnit.util.{MysqlUtil, SparkUtil}

import java.sql.{DriverManager, ResultSet, Statement}
import scala.collection.JavaConverters._
import scala.collection.JavaConversions._

/**
 * @Description:
 * @Author: zhuxianyu
 * @Date: 2023/9/13 16:21
 * @Version: 1.0
 */
object Vec2Milvus {

  // Milvus client for host "node3", port 19530. Because this is an `object`,
  // each Spark executor JVM initializes its own instance on first reference
  // inside the foreachPartition closure (no driver-side serialization needed).
  val milvusTemplate = new MilvusTemplate("node3", 19530)
  // Dimensionality of every embedding vector loaded below.
  val dimension = 64
  // (Milvus collection name, source Hive table) pairs to synchronize.
  val configArr = Array(
    ("user_embedding", "dws.dws_user_embedding"),
    ("user_label_w2v", "dws.dws_user_label_w2v")
  )

  /**
   * Entry point. For each configured pair, drops and re-creates the Milvus
   * collection, then bulk-loads the distinct (milvus_key, vector) rows from
   * the corresponding Hive table, one insert batch per RDD partition.
   *
   * @param args args(0) is the environment name; the process exits with
   *             status 1 if SparkUtil.verifyEnv rejects it
   */
  def main(args: Array[String]): Unit = {
    val env = args(0)
    if (!SparkUtil.verifyEnv(env))
      System.exit(1)
    val spark: SparkSession = SparkUtil.initSparkSession(env, "Vec2Milvus")

    configArr.foreach { case (collectionName, hiveTableName) =>
      println(s"env=$env\tcollectionName=$collectionName\thiveTableName=$hiveTableName")

      // Rebuild the collection from scratch: drop any existing one first.
      // NOTE(review): the sleep presumably gives the Milvus server time to
      // finish the asynchronous drop before re-creating — confirm whether the
      // client offers a synchronous drop/flush instead of a fixed 5s wait.
      if (milvusTemplate.hasCollection(collectionName)) {
        milvusTemplate.deleteCollection(collectionName)
        Thread.sleep(5000)
      }
      milvusTemplate.createIPCollection(collectionName, dimension)

      // Read distinct (milvus_key, vector) rows from Hive; each executor
      // partition is materialized and inserted as one batch.
      spark.table(hiveTableName)
        .select("milvus_key", "vector")
        .distinct()
        .rdd
        .foreachPartition { rows =>
          // Build the parallel id/vector lists by mapping the iterator once
          // and unzipping — no mutable vars, and row order is preserved
          // (the original prepend-based accumulation reversed it; pairing
          // between ids(i) and vectors(i) is identical either way).
          val (ids, vectors) = rows.map { row =>
            // Box the primitive key as java.lang.Long for the Milvus client.
            val id: java.lang.Long = Long.box(row.getAs[Long]("milvus_key"))
            // 1. Double -> Float  2. box as java.lang.Float  3. Seq -> java.util.List
            val vector: java.util.List[java.lang.Float] =
              row.getAs[Seq[Double]]("vector").map(d => Float.box(d.toFloat)).asJava
            (id, vector)
          }.toList.unzip

          // Fix: skip empty partitions instead of issuing a zero-row insert.
          // NOTE(review): the call relies on the file-level (deprecated)
          // JavaConversions import if loadData expects java.util.List —
          // confirm loadData's parameter types before removing that import.
          if (ids.nonEmpty)
            milvusTemplate.loadData(vectors, ids, collectionName)
        }
    }

    spark.stop()
  }

}
