package com.techsun.datanow.sync

import java.util.Properties

import org.apache.kudu.spark.kudu.KuduContext
import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.SparkSession

import scala.util.control.NonFatal

class Sync_CmsDoneRecord extends TMysqlDataSync {
  /**
   * Synchronizes the MySQL table `t_cms_done_record` into the Kudu table
   * `sh_cmsdonerecord` by reading the full table over JDBC and upserting
   * every row through the Kudu context.
   *
   * @param spark         active Spark session
   * @param mysqlUrl      MySQL JDBC connection URL
   * @param mysqlDriver   MySQL JDBC driver class name
   * @param mysqlUser     MySQL user name
   * @param mysqlPassword MySQL password
   * @param kuduMaster    Kudu master node address(es)
   * @param beginTime     sync window start (not used by this full-table sync)
   * @param endTime       sync window end (not used by this full-table sync)
   * @return true only when both the JDBC read and the Kudu upsert succeed
   */
  override def sync(spark: SparkSession, mysqlUrl: String, mysqlDriver: String, mysqlUser: String, mysqlPassword: String, kuduMaster: String, beginTime: Long, endTime: Long): Boolean = {

    // Silence noisy framework logging; only errors surface.
    Logger.getLogger("org").setLevel(Level.ERROR)
    Logger.getLogger("com").setLevel(Level.ERROR)
    val kuduContext = new KuduContext(kuduMaster, spark.sparkContext)

    // Builds the JDBC connection properties for the read side.
    def dbConnProperties(user: String, pass: String): Properties = {
      val connProperties = new Properties()
      connProperties.put("driver", mysqlDriver)
      connProperties.put("user", user)
      connProperties.put("password", pass)
      connProperties.put("fetchsize", "1000")  // rows fetched per round trip
      connProperties.put("batchsize", "10000") // rows written per batch
      connProperties
    }

    val readConnProperties = dbConnProperties(mysqlUser, mysqlPassword)
    val tableName: String = "sh_cmsdonerecord"
    val sql = "select \n\t`record_id`,\n  `is_delete`,\n  `delete_time`,\n  `create_time`,\n  `update_time`,\n  `member_id`,\n  `channel`,\n  `tile_id`,\n  `tile_type`,\n  `title`,\n  `inside_title`,\n  `do_datetime`,\n  `points_type`,\n  `points`,\n  `is_lucky`,\n  `question`,\n  `options`,\n  `ugc_media_list`,\n  `deliver_way`,\n  `order_no`,\n  `express_order_no`,\n  `express_company`,\n  `express_name`,\n  `express_address`,\n  `express_mobile`,\n  `store_name`,\n  `store_code`,\n  `store_address`,\n  `coupon_name`,\n  `coupon_end_time`,\n  `coupon_code`,\n  `gift_type`,\n  `gift_url`,\n  `event_no`,\n  `coupon_start_time`\nfrom t_cms_done_record"

    // Read first; a failed read must abort the sync. The original code left
    // `df` null on failure and still called upsertRows, causing an NPE, and
    // always returned true so callers could never see the failure.
    val maybeDf =
      try {
        val df = spark.read.jdbc(mysqlUrl, s"(${sql}) t", readConnProperties)
        printf("t_cms_done_record query count: %d\n", df.count())
        Some(df)
      } catch {
        // NonFatal keeps VM errors / interrupts propagating instead of
        // swallowing every Throwable subtype.
        case NonFatal(ex) =>
          printf("sh_cmsdonerecord spark.read exception: %s\n", ex.getMessage)
          None
      }

    maybeDf match {
      case Some(df) =>
        try {
          kuduContext.upsertRows(df, tableName)
          true
        } catch {
          case NonFatal(ex) =>
            printf("sh_cmsdonerecord kuduContext.upsertRows exception: %s\n", ex.getMessage)
            false
        }
      case None =>
        false
    }
  }
}
