package Lothar

import org.apache.spark.sql.SparkSession
import org.elasticsearch.hadoop.cfg.ConfigurationOptions

object ES_2_Hive {

  /** Elasticsearch node address used by all index reads below. */
  private val EsNodes = "192.168.10.110"

  /**
   * Extracts a fixed set of Elasticsearch indices and persists each of
   * them as a table in the Hive database `portrait` (overwriting any
   * existing table of the same name).
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .master("local[*]")
      .appName("ES 2 Hive")
      // Elasticsearch connection settings
      .config(ConfigurationOptions.ES_NODES, "master")
      .config(ConfigurationOptions.ES_PORT, "9200")
      // Hive connection settings
      .config("hive.metastore.uris", "thrift://master:9083")
      .config("spark.sql.warehouse.dir", "hdfs://master:8020/usr/hive/warehouse")
      .enableHiveSupport()
      .getOrCreate()

    // Reads a whole ES index (match-all query) into a DataFrame.
    // Adjust es.query here to limit how much data is extracted.
    def readEsIndex(resource: String, extra: Map[String, String] = Map.empty) =
      spark.read
        .format("org.elasticsearch.spark.sql")
        .option("es.nodes", EsNodes)
        .option("es.query", "?q=*:*")
        .options(extra)
        .load(resource)

    val media_index = readEsIndex(
      "media_index/media",
      // vod_cat_tags is multi-valued on the ES side; read it as an array
      Map("es.read.field.as.array.include" -> "vod_cat_tags"))
    val billevent = readEsIndex("billevents/mmconsume")
    val orderData = readEsIndex("order_index/order")
    val usermsg   = readEsIndex("usermsg/mediamatch")
    val userevent = readEsIndex("userevent/mediamatch")

    // Uncomment to sanity-check the extracted data
    //    media_index.show()
    //    billevent.show()
    //    orderData.show()
    //    usermsg.show()
    //    userevent.show()

    // Store the data in Hive. IF NOT EXISTS replaces the previous
    // "show databases" + filter check, whose column name differs across
    // Spark versions (`databaseName` in 2.x, `namespace` in 3.x) and
    // would silently fail to detect an existing database on 3.x.
    spark.sql("create database if not exists portrait")

    media_index.write.mode("overwrite").saveAsTable("portrait.media_index")
    billevent.write.mode("overwrite").saveAsTable("portrait.billevent")
    orderData.write.mode("overwrite").saveAsTable("portrait.orderData")
    usermsg.write.mode("overwrite").saveAsTable("portrait.usermsg")
    userevent.write.mode("overwrite").saveAsTable("portrait.userevent")

    spark.stop()

  }
}
