package Lothar

import Lothar.Step1.spark
import org.apache.spark.sql.SparkSession

object Step3 {

  /** Entry point: loads the raw portrait tables from Hive for the cleaning step.
    *
    * Builds a Hive-enabled SparkSession against the cluster metastore, reads the
    * five source tables, and stops the session on exit. The actual cleaning
    * transformations are not implemented yet (requirements still pending).
    */
  def main(args: Array[String]): Unit = {
    // Hive-enabled session; appName set so the job is identifiable in the
    // Spark UI / history server instead of appearing with a generated name.
    val spark: SparkSession = SparkSession.builder()
      .appName("Step3")
      .config("hive.metastore.uris", "thrift://master:9083")
      .config("spark.sql.warehouse.dir", "hdfs://master:8020/usr/hive/warehouse")
      .enableHiveSupport()
      .getOrCreate()

    // All frames are read-only references — `val`, never `var`
    // (mediaIndexDF was a `var` but is never reassigned).
    val mediaIndexDF = spark.read.table("portrait.MediaIndex1")
    val billeventDF  = spark.read.table("portrait.Billevent1")
    val orderDF      = spark.read.table("portrait.Order1")
    val userMsgDF    = spark.read.table("portrait.UserMsg1")
    val userEventDF  = spark.read.table("portrait.UserEvent1")

    // NOTE: the data-cleaning requirements are still being discussed,
    // so no further transformations are written here yet.
    // (original comment, translated from Chinese)

    // Release the session and its cluster resources when the job finishes;
    // the original leaked the SparkSession.
    spark.stop()
  }
}
