package cn.ekgc.shopping

import org.apache.spark.sql.SparkSession

object MyApp {

  /** Default HDFS directory holding the raw source files. */
  private val DefaultSourcePath = "hdfs://hadoop-master:9000/user/root/day23"

  /**
   * Entry point: fetches the raw sources from HDFS, registers a Hive table
   * for each recognized source, then runs the analytics jobs.
   *
   * An explicit `main` replaces the `App` trait to avoid its
   * delayed-initialization pitfalls; invoking the object is unchanged.
   *
   * @param args optional first argument overrides the HDFS source directory
   */
  def main(args: Array[String]): Unit = {
    // Allow the source directory to be overridden on the command line;
    // defaults to the original hard-coded path for backward compatibility.
    val sourcePath = args.headOption.getOrElse(DefaultSourcePath)

    // Fetch the data sources from HDFS first (as the original did,
    // before the Spark session exists).
    val data = CreateTables.getSources(sourcePath)

    // Build the Spark context with Hive support enabled.
    val spark = SparkSession.builder()
      .appName("shopping spark jobs")
      .enableHiveSupport()
      .getOrCreate()

    try {
      // Walk the sources and create the matching Hive table for each.
      // A match on the table name replaces the original non-exclusive
      // if-chain — each source can only ever hit one branch.
      data.foreach { case (path, table) =>
        table match {
          case "customers"   => CreateTables.createTableCustomers(path, table, spark)
          case "orders"      => CreateTables.createTableOrder(path, table, spark)
          case "order_items" => CreateTables.createTableOrderItem(path, table, spark)
          case "products"    =>
            // TODO: no products table builder exists yet
            // CreateTables.createTableCustomers(path, table, spark)
          case _             => // unknown source — skipped (original ignored these too)
        }
      }

      // Total population per state.
      FenXi.preStatePeopleCount(spark)

      // TODO: top spender per state
      // TODO: most frequent shopper per state
      // TODO: busiest month of each year
    } finally {
      // Release cluster resources even if a job fails (original never stopped it).
      spark.stop()
    }
  }
}
