package com.techsun.datanow.sync

import com.microsoft.azure.cosmosdb.spark.config.Config
import com.microsoft.azure.cosmosdb.spark.schema._
import org.apache.kudu.spark.kudu.KuduContext
import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.types._

import scala.util.control.NonFatal


class Sync_Store extends TDataSync {
  /**
   * Synchronizes the CosmosDB "TsrStore" collection (database "VF_Vans_CN")
   * into the Kudu table "sh_tsrstore", upserting every document whose
   * `_ts` is >= beginTime.
   *
   * @param spark             active Spark session
   * @param cosmosDbEndpoint  CosmosDB endpoint URL
   * @param cosmosDbMasterkey master key for the CosmosDB endpoint
   * @param kuduMaster        Kudu master node address
   * @param beginTime         lower bound (inclusive) applied to the CosmosDB `_ts` field
   * @param endTime           currently unused — NOTE(review): the query filters only on
   *                          beginTime; confirm whether an upper bound on `_ts` was intended
   * @return true when the sync completed, false when any non-fatal exception occurred
   */
  override def sync(spark: SparkSession, cosmosDbEndpoint: String, cosmosDbMasterkey: String, kuduMaster: String, beginTime: Long, endTime: Long): Boolean = {
    try {
      // Silence noisy framework logging; only errors are of interest here.
      Logger.getLogger("org").setLevel(Level.ERROR)
      Logger.getLogger("com").setLevel(Level.ERROR)
      val kuduContext = new KuduContext(kuduMaster, spark.sparkContext)
      // The custom query projects CosmosDB document fields onto the
      // lower-cased column names expected by the Kudu schema below.
      val config = Config(Map(
        "Endpoint" -> cosmosDbEndpoint,
        "Masterkey" -> cosmosDbMasterkey,
        "Database" -> "VF_Vans_CN",
        "preferredRegions" -> "China East 2;",
        "Collection" -> "TsrStore",
        "query_custom" -> s""" SELECT
                             | c.id as id,
                             | c.code as code,
                             | c.source as source ,
                             | c.oldCode as oldcode ,
                             | c.zhName as zhname,
                             | c.enName as enname,
                             | c.brand as brand,
                             | c.country as country,
                             | c.nickName as nickname,
                             | c.type as type,
                             | c.state as state,
                             | c.openDate as opendate,
                             | c.closeDate as closedate,
                             | c.storeGrading as storegrading,
                             | c.areaCode as areacode,
                             | c.vfRegion as vfregion,
                             | c.brandRegion as brandregion,
                             | c.province as province,
                             | c.city as city,
                             | c.area as area,
                             | c.cityTier as citytier,
                             | c.address as address,
                             | c.contact as contact,
                             | c.ownership as ownership,
                             | c.storeType as storetype,
                             | c.parentCode as parentcode,
                             | c.createTime as createtime,
                             | c.updateTime  as updatetime,
                             | c.closeMigrateStoreCode as closemigratestorecode,
                             | c.closeMigrateStoreId as closemigratestoreid,
                             | c.migrateDate as migratedate,
                             | c._ts as _ts
                             | FROM c where c._ts >= ${beginTime}""".stripMargin))
      // Explicit read schema: one field per projected column, all nullable.
      val schema = StructType(
        List(
          StructField("id", StringType, true),
          StructField("code", StringType, true),
          StructField("source", IntegerType, true),
          StructField("oldcode", StringType, true),
          StructField("zhname", StringType, true),
          StructField("enname", StringType, true),
          StructField("brand", StringType, true),
          StructField("country", StringType, true),
          StructField("nickname", StringType, true),
          StructField("type", IntegerType, true),
          StructField("state", IntegerType, true),
          StructField("opendate", TimestampType, true),
          StructField("closedate", TimestampType, true),
          StructField("storegrading", StringType, true),
          StructField("areacode", StringType, true),
          StructField("vfregion", StringType, true),
          StructField("brandregion", StringType, true),
          StructField("province", StringType, true),
          StructField("city", StringType, true),
          StructField("area", StringType, true),
          StructField("citytier", StringType, true),
          StructField("address", StringType, true),
          StructField("contact", StringType, true),
          StructField("ownership", StringType, true),
          StructField("storetype", StringType, true),
          StructField("parentcode", StringType, true),
          StructField("createtime", TimestampType, true),
          StructField("updatetime", TimestampType, true),
          StructField("closemigratestorecode", StringType, true),
          StructField("closemigratestoreid", StringType, true),
          StructField("migratedate", TimestampType, true),
          StructField("_ts", IntegerType, true)
        ))

      val df = spark.sqlContext.read.schema(schema).cosmosDB(config)
      val count = df.count()
      printf("Sync_TsrStore query count: %d\n", count)
      kuduContext.upsertRows(df, "sh_tsrstore")
      // Method is an expression: last value is the result — no `return` needed.
      true
    } catch {
      // NonFatal lets fatal JVM errors (OutOfMemoryError, InterruptedException,
      // etc.) propagate instead of being silently reported as a failed sync.
      case NonFatal(ex) =>
        println("Sync_TsrStore exception: " + ex.getMessage)
        false
    }
  }
}
