package com.techsun.datanow.sync

import com.microsoft.azure.cosmosdb.spark.config.Config
import com.microsoft.azure.cosmosdb.spark.schema._
import org.apache.kudu.spark.kudu.KuduContext
import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.types._

import scala.util.control.NonFatal

/**
 * Syncs the CosmosDB "TsrProduct" collection into the Kudu dimension table "dimdatas".
 *
 * @author Song
 * @date 2020/9/22
 */
class Sync_TsrProduct extends TDataSync {
  /**
   * Synchronizes the CosmosDB "TsrProduct" collection into the Kudu table
   * "dimdatas", projecting each product document into one dimension row
   * and upserting the result.
   *
   * @param spark             active Spark session
   * @param cosmosDbEndpoint  CosmosDB endpoint URI
   * @param cosmosDbMasterkey master key for the CosmosDB endpoint
   * @param kuduMaster        Kudu master node address
   * @param beginTime         inclusive lower bound on the document `_ts` timestamp
   * @param endTime           NOTE(review): currently unused — the query filters only
   *                          on beginTime; confirm whether an upper bound was intended
   * @return true when the sync completed, false when any non-fatal exception occurred
   */
  override def sync(spark: SparkSession, cosmosDbEndpoint: String, cosmosDbMasterkey: String, kuduMaster: String, beginTime: Long, endTime: Long): Boolean = {
    try {
      // Silence noisy framework logging; only errors are of interest here.
      Logger.getLogger("org").setLevel(Level.ERROR)
      Logger.getLogger("com").setLevel(Level.ERROR)

      val tableName = "dimdatas"
      val kuduContext = new KuduContext(kuduMaster, spark.sparkContext)

      // Custom query maps each product document to a dimension row; documents
      // with a null zhName or code are filtered out at the source.
      val config = Config(Map(
        "Endpoint" -> cosmosDbEndpoint,
        "Masterkey" -> cosmosDbMasterkey,
        "Database" -> "VF_Vans_CN",
        "preferredRegions" -> "China East 2;",
        "Collection" -> "TsrProduct",
        "query_custom" ->
          s"""SELECT 'vanscn' AS project,'product' as  dimkey, c.id as  key , c.id AS code, CONCAT(c.zhName,'[',c.code,']') AS name FROM c
             | where c.zhName != null and c.code != null and  c._ts >= ${beginTime}""".stripMargin))

      val schema = StructType(
        List(
          StructField("project", StringType, nullable = true),
          StructField("dimkey", StringType, nullable = true),
          StructField("key", StringType, nullable = true),
          StructField("code", StringType, nullable = true),
          StructField("name", StringType, nullable = true)
        )
      )

      val df = spark.sqlContext.read.schema(schema).cosmosDB(config)

      // Attach a random UUID as the surrogate primary key for the Kudu upsert.
      val df1 = df.selectExpr("project", "dimkey", "key", "code", "name",
        "reflect('java.util.UUID', 'randomUUID') as id")

      // Cache so the Cosmos query runs once instead of twice (count + upsert);
      // without this, the non-deterministic UUID column would also be
      // re-evaluated, so the counted rows would not be the upserted rows.
      df1.cache()
      try {
        println(s"Sync_TsrProduct query count: ${df1.count()}")
        kuduContext.upsertRows(df1, tableName)
      } finally {
        df1.unpersist()
      }
      true
    } catch {
      // NonFatal only: let OutOfMemoryError, InterruptedException, etc. propagate
      // instead of being swallowed into a boolean.
      case NonFatal(ex) =>
        println("Sync_TsrProduct exception: " + ex.getMessage)
        false
    }
  }
}
