/* NOTE(review): this entire file is wrapped in a block comment (dead code).
   Remove the enclosing comment markers to re-enable HiveWriterHandler.
package com.zetatech.bdp.writer.hive

import java.net.URI
import java.sql.DriverManager
import java.util
import java.util.Date

import com.zetatech.bdp.entity.{ColumnDef, HiveWriter, Writer}
import com.zetatech.bdp.writer.WriterHandler
import com.zetatech.bdp.writer.hive.utils.DateUtils.TARGET_TIME_FORMAT
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FSDataOutputStream, FileSystem, Path}

import scala.collection.JavaConversions._

/**
  * Writer handler that persists rows to Hive: rows are serialized to a
  * pipe-delimited text payload, staged on HDFS, then loaded into the target
  * table with a `LOAD DATA` statement over the Hive JDBC driver.
  *
  * @author cheng.cao@zetatech.com.cn
  */
class HiveWriterHandler extends WriterHandler {

  /**
    * Persist rows to Hive.
    *
    * Serializes every row as one pipe-delimited line (field order taken from
    * `columnDefs`), uploads the payload to HDFS, then LOADs it into Hive.
    *
    * @param writer     writer definition (eg: rdbms/hive/hbase/kudu); must be a HiveWriter
    * @param columnDefs all column name/type definitions (NOTE: order is unrelated to `datas`)
    * @param datas      rows; each Map is columnName -> value
    */
  override def write(writer: Writer, columnDefs: util.List[ColumnDef], datas: util.List[util.Map[String, String]]): Unit = {

    val hiveWriter = writer.asInstanceOf[HiveWriter]

    // Column names fix the serialization order for every row.
    val schemaNames = columnDefs.map(_.getName)

    // One pipe-delimited line per row. mkString replaces the original
    // append-then-dropRight(1) trailing-separator handling.
    val payload = new StringBuilder()
    for (row <- datas) {
      payload.append(schemaNames.map(row.get(_)).mkString("|")).append("\n")
    }

    val startHdfsTime = System.currentTimeMillis()

    // Generate the HDFS staging path for this batch.
    val path = generateHdfsPath(hiveWriter)

    // Upload to HDFS.
    saveToHDFS(hiveWriter, payload, path)
    val endHdfsTime = System.currentTimeMillis()
    val hdfsTime = (endHdfsTime - startHdfsTime) / 1000
    println("Save TO HDFS consuming：" + hdfsTime + "s")

    val startHiveTime = System.currentTimeMillis()

    // LOAD the staged file into Hive.
    loadToHive(hiveWriter, path)
    val endHiveTime = System.currentTimeMillis()
    val hiveTime = (endHiveTime - startHiveTime) / 1000
    println("Load TO Hive consuming：" + hiveTime + "s")

    val totalTime = (endHiveTime - startHdfsTime) / 1000
    println("Total consuming：" + totalTime + "s")
  }

  /**
    * Upload the serialized payload to HDFS, creating the file when it does
    * not exist and appending otherwise.
    *
    * @param hiveWriter   configuration source (defaultFS)
    * @param totallSbData serialized row data
    * @param path         target HDFS path
    */
  private def saveToHDFS(hiveWriter: HiveWriter, totallSbData: StringBuilder, path: String): Unit = {
    val hdfsUrl = hiveWriter.getDefaultFS

    val configuration = new Configuration
    configuration.set("dfs.replication", "1")
    val fileSystem = FileSystem.get(new URI(hdfsUrl), configuration, "hadoop")

    val target = new Path(path)
    var out: FSDataOutputStream = null
    try {
      // Create on first write, append on subsequent writes to the same path.
      out = if (!fileSystem.exists(target)) fileSystem.create(target) else fileSystem.append(target)
      out.write(totallSbData.toString.getBytes)
      out.flush()
    } finally {
      // BUG FIX: the original swallowed the exception, then called
      // out.flush()/out.close() outside the try — NPE when create/append
      // had failed and `out` was still null. Close only what was opened
      // and let the real failure propagate.
      if (out != null) out.close()
    }
  }

  /**
    * LOAD the staged file into the target Hive table over JDBC.
    * Falls back to the `default` database when none is configured.
    *
    * @param hiveWriter configuration source (JDBC URL, credentials, table)
    * @param path       staged file path; a path containing "local" triggers
    *                   `LOAD DATA LOCAL INPATH`
    */
  private def loadToHive(hiveWriter: HiveWriter, path: String): Unit = {

    val conf = hiveWriter.getConf
    val hiveJdbcUrl = conf.get("hiveJdbcUrl")
    val table = hiveWriter.getTable
    val user = conf.get("user")
    val pwd = conf.get("password")

    val dataBase = {
      val db = hiveWriter.getDatabase
      if (db == null || "".equals(db)) "default" else db
    }

    Class.forName("org.apache.hive.jdbc.HiveDriver")
    //TODO replace direct connections with a connection pool
    val conn = DriverManager.getConnection(hiveJdbcUrl, user, pwd)
    try {
      val stmt = conn.createStatement()
      try {
        // FIX: wrap in try/finally so Statement/Connection are released
        // even when execute() throws (the original leaked both on failure).
        if (path.contains("local")) {
          stmt.execute(s"load data  local inpath  '$path' into table $dataBase.$table")
        } else {
          stmt.execute(s"load data inpath '$path' into table $dataBase.$table")
        }
      } finally {
        stmt.close()
      }
    } finally {
      conn.close()
    }
  }

  /**
    * Build the HDFS upload path.
    *
    * Path format: <base>/yyyyMMddHHmmSS.<storeType>, where <base> defaults to
    * /root/<dataBase>/<table>/tmp/ when no "path" entry is configured.
    *
    * @param hiveWriter configuration source
    * @return the generated path
    */
  def generateHdfsPath(hiveWriter: HiveWriter): String = {
    val conf = hiveWriter.getConf

    // BUG FIX: the original tested "".equals(conf.get(dataBase)) — it looked
    // the already-fetched VALUE up again as a key, so an empty dataBase/table
    // never fell back to its default. Test the value itself instead.
    var dataBase = conf.get("dataBase")
    if (dataBase == null || "".equals(dataBase)) {
      dataBase = "default"
    }
    var table = conf.get("table")
    if (table == null || "".equals(table)) {
      table = "test"
    }

    var storeType = hiveWriter.getStore
    if (storeType == null || "".equals(storeType)) {
      storeType = "txt"
    }

    var path = conf.get("path") // configured HDFS base path, may be absent
    if (path == null || "".equals(path)) {
      path = s"/root/${dataBase}/${table}/tmp/"
    }

    // Timestamped file name so repeated batches never collide.
    val formatTime = TARGET_TIME_FORMAT.format(new Date(System.currentTimeMillis()))
    if (path.endsWith("/")) {
      path + formatTime + "." + storeType
    } else {
      path + "/" + formatTime + "." + storeType
    }
  }
}
*/
