package e_commerce

import org.apache.spark.sql.SaveMode

object task10 {

  // JDBC connection settings shared by the source read and both result writes.
  // NOTE(review): credentials are hard-coded — move to configuration/env in production.
  private val JdbcUrl      = "jdbc:mysql://niit-master:3306/sem7_sparkpj"
  private val JdbcDriver   = "com.mysql.jdbc.Driver"
  private val JdbcUser     = "root"
  private val JdbcPassword = "root"

  // Number of days the hourly totals are averaged over.
  // NOTE(review): presumably the dataset window minus the three excluded
  // promotion days below — confirm against the source data's date range.
  private val NumDays = 28

  /**
   * Computes average hourly page views and average hourly add-to-cart /
   * favorite / purchase counts (excluding three promotion days) from the
   * MySQL `user` behaviour table, and overwrites the results into the
   * `result10_1` / `result10_2` MySQL tables.
   */
  def main(args: Array[String]): Unit = {
    import org.apache.spark.sql.SparkSession

    // Spark session with Hive support so the metastore at niit-master is reachable.
    val spark = SparkSession.builder()
      .appName("daily_hourly")
      .master("spark://niit-master:7077")
      .config("hive.metastore.uris", "thrift://niit-master:9083")
      //      .config("spark.driver.host","10.10.4.28")
      .enableHiveSupport()
      .getOrCreate()

    // Load the source `user` behaviour table from MySQL.
    val userDF = spark.read
      .format("jdbc")
      .option("url", JdbcUrl)
      .option("driver", JdbcDriver)
      .option("user", JdbcUser)
      .option("password", JdbcPassword)
      .option("dbtable", "user")
      .load()

    // Register a temp view so the SQL below can reference the table by name.
    userDF.createOrReplaceTempView("user")

    // Average hourly page views (behavior_type = '1'), excluding the three
    // promotion days. The inner GROUP BY already yields exactly one row per
    // hour, so the original's redundant outer GROUP BY is dropped.
    val pvSql =
      s"""
        |SELECT t.time_hour, FLOOR(t.pv / $NumDays) AS pv
        |FROM (
        |  SELECT HOUR(time) AS time_hour, COUNT(*) AS pv
        |  FROM `user`
        |  WHERE DATE(time) NOT IN ('2014-12-11', '2014-12-12', '2014-12-13')
        |    AND behavior_type = '1'
        |  GROUP BY time_hour
        |) AS t
        |ORDER BY time_hour
        |""".stripMargin

    // Average hourly add-to-cart ('3'), favorite ('2') and purchase ('4')
    // counts over the same window. behavior_type is compared as a quoted
    // string for consistency with the query above (the original mixed
    // quoted and bare literals between the two queries).
    val actionsSql =
      s"""
        |SELECT t.time_hour,
        |  FLOOR(t.add_cart / $NumDays) AS add_cart,
        |  FLOOR(t.favorite / $NumDays) AS favorite,
        |  FLOOR(t.purchase / $NumDays) AS purchase
        |FROM (
        |  SELECT HOUR(time) AS time_hour,
        |    COUNT(CASE WHEN behavior_type = '3' THEN 1 END) AS add_cart,
        |    COUNT(CASE WHEN behavior_type = '2' THEN 1 END) AS favorite,
        |    COUNT(CASE WHEN behavior_type = '4' THEN 1 END) AS purchase
        |  FROM `user`
        |  WHERE DATE(time) NOT IN ('2014-12-11', '2014-12-12', '2014-12-13')
        |  GROUP BY time_hour
        |) AS t
        |ORDER BY time_hour
        |""".stripMargin

    val df1 = spark.sql(pvSql)
    val df2 = spark.sql(actionsSql)
    df1.show(false)
    df2.show(false)

    // Persist both result sets back to MySQL, replacing any previous run.
    writeToMysql(df1, "result10_1")
    writeToMysql(df2, "result10_2")

    // Release the Spark session and its cluster resources.
    spark.close()
  }

  /** Overwrites `table` in the project MySQL database with the rows of `df`. */
  private def writeToMysql(df: org.apache.spark.sql.DataFrame, table: String): Unit =
    df.write
      .format("jdbc")
      .option("url", JdbcUrl)
      .option("driver", JdbcDriver)
      .option("user", JdbcUser)
      .option("password", JdbcPassword)
      .option("dbtable", table)
      .mode(SaveMode.Overwrite)
      .save()
}
