package com.indicatorcalculation.lk

import org.apache.spark.sql.SparkSession

object IndicatorCalculation01 {

  /**
   * Entry point for the indicator-calculation batch job.
   *
   * Reads the `customer_lk` and `orders_lk` tables from the Hive `dwd`
   * database, registers staging temp views for each, then joins them on
   * `CUSTKEY` and prints customer names ordered by order total price
   * (highest first) via `show()` (first 20 rows by default).
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {

    val spark = SparkSession.builder()
      .appName("IndicatorCalculation01")
      .config("spark.sql.warehouse.dir","hdfs://master:9000/user/hive/warehouse")
      .enableHiveSupport()
      .getOrCreate()

    try {
      // Use SparkSession.sql directly: `sqlContext` is retained only for
      // backward compatibility since Spark 2.0.
      spark.sql("use dwd")

      // Stage the customer table into a temp view.
      spark.sql(
        """
          |select
          |CUSTKEY,
          |NAME,
          |ADDRESS,
          |NATIONKEY,
          |PHONE,
          |ACCTBAL,
          |MKTSEGMENT,
          |COMMENT
          |from customer_lk
          |""".stripMargin).createOrReplaceTempView("customer_lk_dwd")

      // Stage the orders table into a temp view.
      spark.sql(
        """
          |select
          |ORDERKEY,
          |CUSTKEY,
          |ORDERSTATUS,
          |TOTALPRICE,
          |ORDERDATE,
          |ORDERPRIORITY,
          |CLERK,
          |SHIPPRIORITY,
          |COMMENT
          |from orders_lk
          |""".stripMargin).createOrReplaceTempView("orders_lk_dwd")

      // Join the two views and list names by total order price, highest first.
      // NOTE(review): the FULL OUTER JOIN keeps orders with no matching
      // customer (null name) and customers with no orders (null totalprice);
      // if only matched pairs are wanted, an INNER JOIN would be more
      // appropriate — confirm against the downstream requirement.
      spark.sql(
        """
          |select
          |name,
          |totalprice
          |from customer_lk_dwd full outer join orders_lk_dwd on
          |(customer_lk_dwd.CUSTKEY=orders_lk_dwd.CUSTKEY)
          |order by totalprice desc
          |""".stripMargin).show()
    } finally {
      // Release the application's cluster resources even if a query fails.
      spark.stop()
    }
  }

}
