package cn.tecnova.Synchronous

import java.sql.Timestamp

import cn.tecnova.bean.{AppcPublicSentimentEvent, AppcUserSubject}
import cn.tecnova.utils.ConfigHandler
import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, SparkContext}

/**
  * description:同步public_sentiment_event到ES
  **/
/**
  * Synchronizes the MySQL table `public_sentiment_event` into Elasticsearch
  * (index/type: `appc_public_sentiment_event/appc_public_sentiment_event`).
  *
  * Reads the full table over JDBC, maps each row onto an
  * [[AppcPublicSentimentEvent]] and bulk-writes the result via
  * elasticsearch-spark's `saveToEs`.
  */
object PublicSentimentEvent2Es {

  // Silence Spark's verbose INFO/WARN logging; only errors are reported.
  Logger.getLogger("org").setLevel(Level.ERROR)

  /**
    * Null-safe Timestamp -> String conversion.
    *
    * The original code called `.toString` directly on the value returned by
    * `getAs[Timestamp]`, which throws a NullPointerException for any row whose
    * timestamp column is NULL in the database. A NULL column now maps to a
    * null String instead of crashing the job.
    */
  private def tsToString(ts: Timestamp): String =
    Option(ts).map(_.toString).orNull

  def main(args: Array[String]): Unit = {

    val conf = new SparkConf()
      .setAppName(this.getClass.getSimpleName)
      //      .setMaster("local[*]")
      .set("es.index.auto.create", "true")
      .set("es.nodes", ConfigHandler.esNodes)
      .set("es.port", ConfigHandler.esPort)
      .set("es.nodes.wan.only", "true")

    val sc = new SparkContext(conf)
    try {
      val sqlContext = new SQLContext(sc)
      import sqlContext.implicits._

      // Full-table read over JDBC; URL and credentials come from ConfigHandler.
      val publicSentimentEventDF =
        sqlContext.read.jdbc(ConfigHandler.url, "public_sentiment_event", ConfigHandler.props)

      // Brings toStringPlusInt (Int -> String) and saveToEs into scope.
      import cn.tecnova.bean.IntString._
      import org.elasticsearch.spark.sql._

      val events = publicSentimentEventDF.map { row =>
        AppcPublicSentimentEvent(
          row.getAs[String]("id"),
          row.getAs[String]("user_id"),
          row.getAs[String]("article_id"),
          row.getAs[String]("article_title"),
          // NOTE(review): "created_time" is read twice (here and again below).
          // This first occurrence may have been intended as a start/begin-time
          // column — confirm against the table schema and the field order of
          // AppcPublicSentimentEvent before changing it.
          tsToString(row.getAs[Timestamp]("created_time")),
          tsToString(row.getAs[Timestamp]("end_time")),
          row.getAs[String]("description"),
          row.getAs[String]("keywords"),
          row.getAs[Int]("run_flag").toStringPlusInt,
          tsToString(row.getAs[Timestamp]("created_time")),
          row.getAs[String]("update_user_id"),
          tsToString(row.getAs[Timestamp]("updated_time")),
          row.getAs[Int]("del_flag").toStringPlusInt
        )
      }

      events.saveToEs("appc_public_sentiment_event" + "/appc_public_sentiment_event")
    } finally {
      // Always release the SparkContext, even when the read or the ES write
      // fails — the original code leaked it on any exception.
      sc.stop()
    }
  }

}
