package com.techsun.datanow

import java.text.SimpleDateFormat
import java.util.{Calendar, Date, TimeZone}

import scala.util.control.NonFatal

import com.techsun.datanow.sync._
import org.apache.commons.lang.StringUtils
import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.SparkSession


/**
 * Entry point for the SH data-sync batch job: copies records from
 * Cosmos DB and MySQL sources into Kudu for a given time window.
 */
object SHDataSyncApp {
  /**
   * Job entry point. Runs every sync step for the selected time window.
   *
   * Expected arguments:
   *  - args(0): Cosmos DB endpoint
   *  - args(1): Cosmos DB master key
   *  - args(2): Kudu master address
   *  - args(3)..args(6): MySQL url / driver / user / password
   *  - args(7) (optional): "t" to use an explicit window from args(8)/args(9)
   *  - args(8), args(9) (required when args(7) == "t"): begin / end of the
   *    sync window, epoch seconds
   *
   * Exits with status 0 on success, 1 when arguments are incomplete or any
   * sync step throws.
   */
  def main(args: Array[String]): Unit = {
    Logger.getLogger("org").setLevel(Level.ERROR)
    val cosmosDbEndpoint = args(0)
    val cosmosDbMasterkey = args(1)
    val kuduMaster = args(2)
    val mysqlUrl = args(3)
    val mysqlDriver = args(4)
    val mysqlUser = args(5)
    val mysqlPassword = args(6)
    val dateset = if (args.length > 7) args(7) else null

    printf("SHDataSyncApp started!\n\ncosmosDbEndpoint: %s\nkuduMaster:%s\n", cosmosDbEndpoint, kuduMaster)

    // Default window: midnight seven days ago up to today's midnight
    // (midnights computed by getZero_time from a GMT calendar).
    val cal: Calendar = Calendar.getInstance(TimeZone.getTimeZone("GMT"))
    val todayZeroTime = getZero_time(cal.getTime)
    cal.add(Calendar.DATE, -7)
    val weekAgoZeroTime = getZero_time(cal.getTime)

    // An explicit window ("t" mode) overrides the default seven-day window.
    val (beginTime, endTime) =
      if (StringUtils.isNotBlank(dateset) && dateset == "t") {
        if (args.length < 10) {
          printf("Parameters count error! There didn't set cycle time")
          System.exit(1)
        }
        (args(8).toLong, args(9).toLong)
      } else {
        (weekAgoZeroTime, todayZeroTime)
      }

    printf("Data sync time: %d - %d\n", beginTime, endTime)

    val spark = SparkSession
      .builder()
      .appName("CustomerInfoSync")
      .config("spark.debug.maxToStringFields", "200")
      .getOrCreate()

    // Run every sync step and derive the process exit code from the outcome.
    // NOTE: the previous version called System.exit(0) inside `finally`,
    // which masked any exception and always reported success to the
    // scheduler; failures must surface as a non-zero exit code.
    val exitCode =
      try {
        printf("执行-----new")

        new Sync_TsrCustomerInfo().sync(spark, cosmosDbEndpoint, cosmosDbMasterkey, kuduMaster, beginTime, endTime)
        new Sync_TsrMemberInfo().sync(spark, cosmosDbEndpoint, cosmosDbMasterkey, kuduMaster, beginTime, endTime)
        new Sync_Coupon().sync(spark, cosmosDbEndpoint, cosmosDbMasterkey, kuduMaster, beginTime, endTime)
        new Sync_Order().sync(spark, cosmosDbEndpoint, cosmosDbMasterkey, kuduMaster, beginTime, endTime)
        new Sync_OrderDetail().sync(spark, cosmosDbEndpoint, cosmosDbMasterkey, kuduMaster, beginTime, endTime)
        new Sync_OrderPayment().sync(spark, cosmosDbEndpoint, cosmosDbMasterkey, kuduMaster, beginTime, endTime)
        new Sync_PointAccount().sync(spark, cosmosDbEndpoint, cosmosDbMasterkey, kuduMaster, beginTime, endTime)
        new Sync_PointRecord().sync(spark, cosmosDbEndpoint, cosmosDbMasterkey, kuduMaster, beginTime, endTime)
        new Sync_BehaviorRecord().sync(spark, cosmosDbEndpoint, cosmosDbMasterkey, kuduMaster, beginTime, endTime)
        new Sync_Store().sync(spark, cosmosDbEndpoint, cosmosDbMasterkey, kuduMaster, beginTime, endTime)
        new Sync_ThirdinfoSync().sync(spark, cosmosDbEndpoint, cosmosDbMasterkey, kuduMaster, beginTime, endTime)
        new Sync_TierRecord().sync(spark, cosmosDbEndpoint, cosmosDbMasterkey, kuduMaster, beginTime, endTime)
        new Sync_CmsTile().sync(spark, mysqlUrl, mysqlDriver, mysqlUser, mysqlPassword, kuduMaster, beginTime, endTime)
        new Sync_CmsDoneRecord().sync(spark, mysqlUrl, mysqlDriver, mysqlUser, mysqlPassword, kuduMaster, beginTime, endTime)

        printf("结束-----")
        0
      } catch {
        case NonFatal(e) =>
          e.printStackTrace()
          1
      } finally {
        spark.close()
      }
    System.exit(exitCode)
  }

  /**
   * Midnight (00:00) timestamp of the day containing `d`, in epoch seconds,
   * shifted forward by 16 hours.
   *
   * Original rationale (translated): the job is scheduled at 02:00 China
   * time while the server clock reads 18:00 of the previous day; adding 16
   * hours maps the server-local midnight onto the corresponding UTC+8 date.
   * Truncation to midnight is done via a format/parse round-trip, so it uses
   * the JVM default time zone.
   *
   * @param d the date whose day's zero-hour timestamp is wanted
   * @return epoch seconds of that day's midnight plus 16 hours
   */
  def getZero_time(d: Date): Long = {
    val dateFormat = new SimpleDateFormat("yyyy-MM-dd")
    // Format then re-parse to drop the time-of-day component, then shift +16h.
    // (The previous Long -> String -> Long round-trip was a no-op and is gone.)
    dateFormat.parse(dateFormat.format(d)).getTime / 1000 + 16 * 60 * 60
  }


}
