package com.wenge.datagroup.storage.util

import com.alibaba.fastjson.JSONObject
import com.wenge.datagroup.storage.util.MysqlUtil.LOG
import org.apache.commons.lang3.StringUtils
import org.apache.kafka.common.TopicPartition
import org.apache.log4j.Logger

import java.sql.{Connection, ResultSet, Statement}
import java.util
import java.util.concurrent.ConcurrentHashMap

import scala.collection.mutable

/** Mysql */
/** JDBC helpers backed by the Druid connection pool: sensitive-word loading,
 *  Kafka offset recovery, host -> site-name resolution, and the media-name map.
 *
 *  All methods log-and-continue on query failure rather than throwing, so
 *  callers must be prepared for empty (or, for [[getAllNameInfo]], null) results.
 */
object MysqlUtil {

  private val LOG = Logger.getLogger(MysqlUtil.getClass.getName)

  // NonFatal keeps truly fatal errors (OOM, interrupts) propagating instead of
  // being swallowed by the broad catch blocks below.
  import scala.util.control.NonFatal

  /** Loads all enabled sensitive words (status = 1) from `labeling_sensitive_words`.
   *
   * @return the set of words; empty when the query fails (errors are logged, not thrown)
   */
  def getSensiWords: Predef.Set[String] = {
    val words = mutable.Set.empty[String]
    var connection: Connection = null
    try {
      connection = DruidDataSourceUtil.getConnection
      val statement: Statement = connection.createStatement()
      try {
        val resultSet =
          statement.executeQuery("select word  from labeling_sensitive_words where status = 1")
        while (resultSet.next()) {
          words += resultSet.getString("word")
        }
      } finally {
        statement.close() // closing the Statement also closes its ResultSet
      }
    } catch {
      case NonFatal(e) => LOG.error("failed to load sensitive words", e)
    } finally {
      if (null != connection)
        connection.close() // returns the connection to the Druid pool
    }

    words.toSet
  }

  /** Reads the stored Kafka offsets for (groupId, topic) from `labeling_kafka_offset`,
   *  skipping rows whose update_time is more than 7 days old.
   *
   * @param groupId consumer group id
   * @param topic   kafka topic name
   * @return partition -> offset map; empty when nothing is stored or the query fails
   */
  def getOffset(groupId: String, topic: String): mutable.HashMap[TopicPartition, Long] = {
    val sqlProxy = new SqlProxy()
    val offsetMap = new mutable.HashMap[TopicPartition, Long]()
    val client = DruidDataSourceUtil.getConnection
    try {
      sqlProxy.executeQuery(
        client,
        "select * from `labeling_kafka_offset` where groupid=? and topic =? and now() < SUBDATE(update_time,interval -7 DAY)",
        Array(groupId, topic),
        new QueryCallback {
          override def process(rs: ResultSet): Unit = {
            while (rs.next()) {
              // column layout: 2 = topic, 3 = partition, 4 = offset
              val model = new TopicPartition(rs.getString(2), rs.getInt(3))
              offsetMap.put(model, rs.getLong(4))
            }
            rs.close() // release the cursor
          }
        }
      )
    } catch {
      case NonFatal(e) => LOG.error("failed to read kafka offsets: " + e.toString, e)
    } finally {
      sqlProxy.shutdown(client)
    }
    LOG.info("读取的偏移量 = " + offsetMap)
    offsetMap
  }

  /** Resolves the site id / site name ("i_sid" / "i_sn") for a record via the
   *  `centre_collect`.`host_info` lookup table.
   *
   *  The host is taken from the old record when non-blank, otherwise from the
   *  new one. Unknown hosts are inserted (name defaulting to the host itself)
   *  and re-read so the generated auto_id can be returned. A blank host yields
   *  i_sid = 0 and an empty i_sn.
   *
   * @param orgRecord the old record
   * @param map       the new record; mutated in place
   * @param hostName  field name holding the host for this channel
   * @return the same `map`, with "i_sid" and "i_sn" populated
   */
  def getIsnNew(
      orgRecord: JSONObject,
      map: ConcurrentHashMap[String, Any],
      hostName: String): ConcurrentHashMap[String, Any] = {
    // Prefer the host from the old record when it is non-blank.
    val orgHostValue: String = orgRecord.getString(hostName)
    val hostValue: String = map.get(hostName).asInstanceOf[String]
    val subHost = if (StringUtils.isNotBlank(orgHostValue)) orgHostValue else hostValue
    if (StringUtils.isNotBlank(subHost)) {
      val sqlProxy = new SqlProxy()
      val client = DruidDataSourceUtil.getConnection
      try {
        sqlProxy.executeQuery(
          client,
          "select auto_id,name from `centre_collect`.`host_info` where host=?",
          Array(subHost),
          new QueryCallback {
            override def process(rs: ResultSet): Unit = {
              // isBeforeFirst tells us whether the host already exists.
              if (rs.isBeforeFirst) {
                // Known host: copy id and name into the record.
                while (rs.next()) {
                  map.put("i_sid", rs.getLong(1))
                  map.put("i_sn", rs.getString(2))
                }
                rs.close() // release the cursor
              } else {
                // Unknown host: register it, then re-read to pick up the
                // generated auto_id.
                sqlProxy.executeUpdate(
                  client,
                  "insert into `centre_collect`.`host_info` (`host`,`name`) values(?,?)",
                  Array(subHost, subHost))
                sqlProxy.executeQuery(
                  client,
                  "select auto_id,name from `centre_collect`.`host_info` where host=?",
                  Array(subHost),
                  new QueryCallback {
                    override def process(rs: ResultSet): Unit = {
                      while (rs.next()) {
                        map.put("i_sid", rs.getLong(1))
                        map.put("i_sn", rs.getString(2))
                      }
                      rs.close()
                    }
                  }
                )
              }
            }
          }
        )
      } catch {
        case NonFatal(e) => LOG.error("failed to resolve host info for " + subHost, e)
      } finally {
        sqlProxy.shutdown(client)
      }
    } else {
      // No usable host: fall back to empty identity.
      map.put("i_sid", 0)
      map.put("i_sn", StringUtils.EMPTY)
    }
    map
  }

  /** Loads the full media name -> shortened name mapping from `media_name`.
   *
   * @return the mapping on success, or null when the query fails
   *         (null-on-failure is the legacy contract existing callers rely on)
   */
  def getAllNameInfo: util.HashMap[String, String] = {
    val sqlProxy = new SqlProxy()
    val client = DruidDataSourceUtil.getConnection
    val resultMap: util.HashMap[String, String] = new util.HashMap[String, String]
    // try/catch used as an expression: no early `return` through the finally.
    try {
      sqlProxy.executeQuery(
        client,
        "select name,shortened_name from media_name",
        null,
        new QueryCallback {
          override def process(rs: ResultSet): Unit = {
            while (rs.next()) {
              resultMap.put(rs.getString(1), rs.getString(2))
            }
            rs.close() // release the cursor
          }
        }
      )
      resultMap
    } catch {
      case NonFatal(e) =>
        LOG.error("failed to load media names", e)
        null
    } finally {
      sqlProxy.shutdown(client)
    }
  }

}
