package com.techsun.datanow.sync

import java.util.{Properties, UUID}

import org.apache.kudu.spark.kudu.KuduContext
import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.SparkSession

class Sync_CmsTile extends TMysqlDataSync {
  /**
   * Synchronizes the MySQL table `t_cms_tile` into the Kudu table `sh_cmstile`.
   *
   * Reads the full projection of `t_cms_tile` over JDBC and upserts every row
   * into Kudu. Any failure (JDBC read, Kudu upsert, context creation) is logged
   * and reported through the return value instead of being swallowed.
   *
   * @param spark         active Spark session
   * @param mysqlUrl      MySQL JDBC connection URL
   * @param mysqlDriver   MySQL JDBC driver class name
   * @param mysqlUser     MySQL user name
   * @param mysqlPassword MySQL password
   * @param kuduMaster    Kudu master address(es)
   * @param beginTime     sync window start (unused by this full-table sync)
   * @param endTime       sync window end (unused by this full-table sync)
   * @return true when the sync completed, false when any step failed
   */
  override def sync(spark: SparkSession, mysqlUrl: String, mysqlDriver: String, mysqlUser: String, mysqlPassword: String, kuduMaster: String, beginTime: Long, endTime: Long): Boolean = {
    import scala.util.control.NonFatal

    try {
      Logger.getLogger("org").setLevel(Level.ERROR)
      Logger.getLogger("com").setLevel(Level.ERROR)
      val kuduContext = new KuduContext(kuduMaster, spark.sparkContext)

      // Builds the JDBC connection properties for the configured driver.
      def dbConnProperties(user: String, pass: String): Properties = {
        val connProperties = new Properties()
        connProperties.put("driver", mysqlDriver)
        connProperties.put("user", user)
        connProperties.put("password", pass)
        connProperties.put("fetchsize", "1000") // rows fetched per round trip
        connProperties.put("batchsize", "10000") // rows written per batch
        connProperties
      }

      val readConnProperties = dbConnProperties(mysqlUser, mysqlPassword)
      val tableName = "sh_cmstile"
      val sql = "select \n\t`tile_id`,\n  `is_delete`,\n  `delete_time`,\n  `create_time`,\n  `update_time`,\n  `type`,\n  `inside_title`,\n  `title`,\n  `channel_list`,\n  `rank`,\n  `category`,\n  `intro`,\n  `pc_cover_image`,\n  `mobile_cover_image`,\n  `is_limit`,\n  `limit`,\n  `points_type`,\n  `points`,\n  `segment`,\n  `list_button_text`,\n  `required_tc`,\n  `short_tc_text`,\n  `short_tc_a_keyword`,\n  `short_tc_a_image`,\n  `complete_text`,\n  `share_text`,\n  `complete_extra_text`,\n  `complete_extra_a_keyword`,\n  `complete_extra_a_image`,\n  `cta_button_list`,\n  `lucky_text`,\n  `unlucky_text`,\n  `sku_code`,\n  `sku_name`,\n  `deliver_way`,\n  `gift_type`,\n  `gift_url`,\n  `question`,\n  `options`,\n  `is_sync_clm`,\n  `sync_clm_field`,\n  `ugc_type`,\n  `ugc_number`,\n  `duration`,\n  `video`,\n  `active_state`,\n  `is_come_out`,\n  `start_time`,\n  `publish_time`,\n  `come_out_time`,\n  `end_time`,\n  `referenced_material`,\n  `participation_people`,\n  `participation_number`,\n  `module_id`\nfrom t_cms_tile"

      // Read from MySQL; a failure here must not reach the upsert with a null
      // DataFrame (the previous code swallowed the read error and then NPE'd),
      // so any exception falls through to the single handler below.
      val df = spark.read.jdbc(mysqlUrl, s"(${sql}) t", readConnProperties)
      printf("Sync t_cms_tile query count: %d\n", df.count())

      kuduContext.upsertRows(df, tableName)
      true
    } catch {
      // NonFatal keeps OutOfMemoryError / InterruptedException propagating;
      // recoverable failures are logged and reported as an unsuccessful sync.
      case NonFatal(ex) =>
        println("Sync t_cms_tile exception: " + ex.getMessage)
        false
    }
  }
}
