package com.cmnit.analysis.common

import com.cmnit.analysis.util.ConfigurationManager
import org.apache.log4j.Logger
import org.apache.spark.sql.DataFrame

trait TController {
  private val logger: Logger = Logger.getLogger(this.getClass)

  /**
   * Appends a DataFrame to a Vertica table over JDBC.
   *
   * Connection settings (driver, url, user, password) are read from
   * [[ConfigurationManager]] under the `vertica.*` property keys.
   *
   * @param tableName target Vertica table name
   * @param dataFrame dataset to write
   */
  def write2vertica(tableName: String, dataFrame: DataFrame): Unit = {
    dataFrame.write
      .format("jdbc")
      .mode("append")
      .option("dbtable", tableName)
      .option("driver", ConfigurationManager.getProperty("vertica.driver"))
      .option("url", ConfigurationManager.getProperty("vertica.url"))
      // BUGFIX: Spark's JDBC source expects the option key "user", not "username".
      // "username" is silently ignored, so the connection would only authenticate
      // if credentials were embedded in the URL. The config property key is unchanged.
      .option("user", ConfigurationManager.getProperty("vertica.username"))
      .option("password", ConfigurationManager.getProperty("vertica.password"))
      .save() // side-effecting 0-arity method: keep parentheses
    logger.info(tableName + "插入完毕！")
  }

  /**
   * Appends a DataFrame to HDFS as CSV files under `path/acctTime`.
   *
   * @param path      base storage path
   * @param acctTime  accounting-period (billing cycle) identifier, used as a subdirectory
   * @param dataFrame dataset to write
   */
  def write2hdfs(path: String, acctTime: String, dataFrame: DataFrame): Unit = {
    // Partition output by accounting period: one subdirectory per acctTime value.
    dataFrame.write
      .format("csv")
      .mode("append")
      .save(path + "/" + acctTime)
    logger.info("数据导出完毕！")
  }
}
