package com.techsun.datanow.sync

import com.microsoft.azure.cosmosdb.spark.config.Config
import com.microsoft.azure.cosmosdb.spark.schema._
import org.apache.kudu.spark.kudu.KuduContext
import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.types._


/** Incremental sync of the Cosmos DB `TsrMemberInfo` collection (database
  * `VF_Vans_CN`) into the Kudu table `sh_tsrmemberinfo`.
  *
  * Rows are selected with a custom Cosmos query filtered on `c._ts > beginTime`
  * and upserted into Kudu, so re-running the same window is idempotent.
  *
  * NOTE(review): `endTime` is accepted but never used in the query predicate —
  * confirm whether the filter should also be bounded above
  * (`and c._ts <= ${endTime}`); as written every document newer than
  * `beginTime` is pulled regardless of `endTime`.
  */
class Sync_TsrMemberInfo extends TDataSync {

  /** Runs one sync pass.
    *
    * @param spark             active Spark session used to read from Cosmos DB
    * @param cosmosDbEndpoint  Cosmos DB account endpoint URL
    * @param cosmosDbMasterkey Cosmos DB master key
    * @param kuduMaster        Kudu master address for the target cluster
    * @param beginTime         lower bound (exclusive) on the Cosmos `_ts` field
    * @param endTime           upper bound — currently unused, see class note
    * @return true on success, false if any non-fatal exception occurred
    */
  override def sync(spark: SparkSession, cosmosDbEndpoint: String, cosmosDbMasterkey: String, kuduMaster: String, beginTime: Long, endTime: Long): Boolean = {
    // Local import keeps this edit self-contained; NonFatal lets fatal errors
    // (OutOfMemoryError, InterruptedException, ...) propagate instead of being
    // silently converted into a `false` return value.
    import scala.util.control.NonFatal

    try {
      // Quiet down framework logging; WARN is enough for a batch sync job.
      Logger.getLogger("org").setLevel(Level.WARN)
      Logger.getLogger("com").setLevel(Level.WARN)

      val kuduContext = new KuduContext(kuduMaster, spark.sparkContext)
      val tableName = "sh_tsrmemberinfo"

      // Cosmos connector config. The custom query aliases camelCase document
      // fields to the lowercase column names used by the Kudu table schema.
      val config = Config(Map(
        "Endpoint" -> cosmosDbEndpoint,
        "Masterkey" -> cosmosDbMasterkey,
        "Database" -> "VF_Vans_CN",
        "preferredRegions" -> "China East 2",
        "Collection" -> "TsrMemberInfo",
        "query_custom" ->
          s"""
             |select
             |c.id,
             |c.shId shid,
             |c.loyaltyId loyaltyid,
             |c.customerId customerid,
             |c.name,
             |c.phone,
             |c.cardNumber cardnumber,
             |c.source,
             |c.registDate registdate,
             |c.registStore registstore,
             |c.memberStatus memberstatus,
             |c.tierCode tiercode,
             |c.tierName tiername,
             |c.firstOrderTime firstordertime,
             |c.lastOrderTime lastordertime,
             |c.retainScore retainscore,
             |c.retainMoney retainmoney,
             |c.retainBuyCount retainbuycount,
             |c.createTime createtime,
             |c.updateTime updatetime,
             |c.tp,
             |c.reservedField1 reservedfield1,
             |c.reservedField2 reservedfield2,
             |c.reservedField3 reservedfield3,
             |c.reservedField4 reservedfield4,
             |c.reservedField5 reservedfield5,
             |c.belongStore belongstore,
             |c._ts
             |from c
             |where c._ts > ${beginTime}
             |""".stripMargin
      ))

      // Explicit schema: column names/types must match the Kudu target table.
      val schema = StructType(
        List(StructField("id", StringType, true),
          StructField("shid", StringType, true),
          StructField("loyaltyid", StringType, true),
          StructField("customerid", StringType, true),
          StructField("name", StringType, true),
          StructField("phone", StringType, true),
          StructField("cardnumber", StringType, true),
          StructField("source", IntegerType, true),
          StructField("registdate", TimestampType, true),
          StructField("registstore", StringType, true),
          StructField("memberstatus", IntegerType, true),
          StructField("tiercode", StringType, true),
          StructField("tiername", StringType, true),
          StructField("firstordertime", TimestampType, true),
          StructField("lastordertime", TimestampType, true),
          StructField("retainscore", DoubleType, true),
          StructField("retainmoney", DoubleType, true),
          StructField("retainbuycount", IntegerType, true),
          StructField("createtime", TimestampType, true),
          StructField("updatetime", TimestampType, true),
          StructField("tp", StringType, true),
          StructField("reservedfield1", StringType, true),
          StructField("reservedfield2", StringType, true),
          StructField("reservedfield3", StringType, true),
          StructField("reservedfield4", StringType, true),
          StructField("reservedfield5", StringType, true),
          StructField("belongstore", StringType, true),
          StructField("_ts", IntegerType, true)
        ))

      // Cache: both count() and upsertRows() are actions; without caching the
      // Cosmos collection would be scanned twice.
      val df = spark.sqlContext.read.schema(schema).cosmosDB(config).cache()
      try {
        val count = df.count()
        println(s"Sync_TsrMemberInfo query count: $count")
        kuduContext.upsertRows(df, tableName)
      } finally {
        df.unpersist()
      }
      true
    } catch {
      case NonFatal(ex) =>
        println("Sync_TsrMemberInfo exception: " + ex.getMessage)
        // Keep the stack trace — the message alone often isn't actionable.
        ex.printStackTrace()
        false
    }
  }
}
