package com.techsun.datanow.sync

import com.microsoft.azure.cosmosdb.spark.config.Config
import com.microsoft.azure.cosmosdb.spark.schema._
import org.apache.kudu.spark.kudu.KuduContext
import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.types._

import scala.util.control.NonFatal

class Sync_PointRecord extends TDataSync {
  /**
   * Synchronizes TsrPointRecord documents from CosmosDB into the Kudu table
   * `sh_tsrpointrecord`, bounded by the CosmosDB `_ts` change timestamp.
   *
   * @param spark             active Spark session
   * @param cosmosDbEndpoint  CosmosDB endpoint URL
   * @param cosmosDbMasterkey master key for the CosmosDB endpoint
   * @param kuduMaster        Kudu master node address
   * @param beginTime         inclusive lower bound on `_ts` (epoch seconds)
   * @param endTime           exclusive upper bound on `_ts` (epoch seconds)
   * @return true on success, false when a non-fatal error occurred
   */
  override def sync(spark: SparkSession, cosmosDbEndpoint: String, cosmosDbMasterkey: String, kuduMaster: String, beginTime: Long, endTime: Long): Boolean = {
    try {
      // Quiet noisy framework loggers so only our output and errors show up.
      Logger.getLogger("org").setLevel(Level.ERROR)
      Logger.getLogger("com").setLevel(Level.ERROR)
      val tableName = "sh_tsrpointrecord"
      // Column aliases lower-case the CosmosDB field names to match the Kudu schema.
      // Both time bounds are applied: previously `endTime` was accepted but ignored,
      // which made every "windowed" sync an open-ended one.
      val config = Config(Map("Endpoint" -> cosmosDbEndpoint,
        "Masterkey" -> cosmosDbMasterkey,
        "Database" -> "VF_Vans_CN",
        "preferredRegions" -> "China East 2;",
        "Collection" -> "TsrPointRecord",
        "query_custom" ->
          s""" SELECT c.id id, c.accountId accountid, c.memberId memberid, c.customerId customerid, c.groupId groupid,
             | c.source source, c.forUpgrade forupgrade, c.costId costid, c.operType opertype, c.orderId orderid,
             | c.detailId detailid, c.behaviorId behaviorid, c.direction direction, c.originalPoint originalpoint,
             | c.originalPointTotal originalpointtotal, c.availablePoint availablepoint, c.validTime as validtime,
             | c.invalidTime as invalidtime, c.baseId baseid, c.promotionId promotionid, c.bulkCampId as bulkcampid,
             | c.behavCampId as behavcampid, c.couponId couponid,  c.createTime as createtime,
             |  c.updateTime as updatetime,c.name name,c.originId originid, c.tileId tileid,c._ts as _ts
             |FROM c
             | where c._ts >= ${beginTime} and c._ts < ${endTime}""".stripMargin))
      // Explicit schema: avoids schema inference sampling and pins column types
      // to what the Kudu table expects.
      val schema = StructType(
        List(
          StructField("id", StringType, true),
          StructField("memberid", StringType, true),
          StructField("accountid", StringType, true),
          StructField("customerid", StringType, true),
          StructField("groupid", StringType, true),
          StructField("source", IntegerType, true),
          StructField("forupgrade", IntegerType, true),
          StructField("costid", StringType, true),
          StructField("opertype", IntegerType, true),
          StructField("orderid", StringType, true),
          StructField("detailid", StringType, true),
          StructField("behaviorid", StringType, true),
          StructField("direction", IntegerType, true),
          StructField("originalpoint", DoubleType, true),
          StructField("originalpointtotal", DoubleType, true),
          StructField("availablepoint", DoubleType, true),
          StructField("validtime", TimestampType, true),
          StructField("invalidtime", TimestampType, true),
          StructField("baseid", StringType, true),
          StructField("promotionid", StringType, true),
          StructField("bulkcampid", StringType, true),
          StructField("behavcampid", StringType, true),
          StructField("couponid", StringType, true),
          StructField("createtime", TimestampType, true),
          StructField("updatetime", TimestampType, true),
          StructField("name", StringType, true),
          StructField("originid", StringType, true),
          StructField("tileid", StringType, true),
          StructField("_ts", IntegerType, true)
        ))

      val df = spark.sqlContext.read.schema(schema).cosmosDB(config)
      val count = df.count()
      printf("Sync_TsrPointRecord query count: %d\n", count)
      // Upsert (insert-or-update) keeps the sync idempotent across re-runs
      // of overlapping time windows.
      val kuduContext = new KuduContext(kuduMaster, spark.sparkContext)
      kuduContext.upsertRows(df, tableName)
      true
    } catch {
      // NonFatal lets OutOfMemoryError / InterruptedException etc. propagate
      // instead of being swallowed into a `false` return.
      case NonFatal(ex) =>
        println("Sync_TsrPointRecord exception: " + ex.getMessage)
        false
    }
  }
}
