package e_commerce
// Aggregate daily user-behavior statistics
import org.apache.spark.sql.SaveMode

object task8 {
  /**
   * Spark batch job: reads the `user` table from MySQL, computes daily
   * counts of user behaviors (add-to-cart, favorite, purchase), prints the
   * result, and writes it back to MySQL table `result8`.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    import org.apache.spark.sql.SparkSession

    // Shared JDBC connection settings for both the read and the write, so
    // the host and credentials are defined in exactly one place.
    // NOTE(review): credentials are hard-coded; consider moving them to
    // configuration or environment variables.
    // NOTE(review): "com.mysql.jdbc.Driver" is the legacy Connector/J 5.x
    // class name; Connector/J 8.x renamed it to "com.mysql.cj.jdbc.Driver".
    // Kept as-is to match the driver jar deployed on the cluster — confirm.
    val jdbcOptions = Map(
      "url" -> "jdbc:mysql://niit-master:3306/sem7_sparkpj",
      "driver" -> "com.mysql.jdbc.Driver",
      "user" -> "root",
      "password" -> "root"
    )

    val spark = SparkSession.builder()
      .appName("date_behavior")
      .master("spark://niit-master:7077")
      .config("hive.metastore.uris", "thrift://niit-master:9083")
      //      .config("spark.driver.host","10.10.4.28")
      .enableHiveSupport()
      .getOrCreate()

    // try/finally so the session is always stopped — even when the job
    // fails — releasing executors held on the standalone cluster.
    try {
      // Read the source table from MySQL.
      val userDF = spark.read
        .format("jdbc")
        .options(jdbcOptions)
        .option("dbtable", "user")
        .load()

      // Register a temp view so the aggregation can be written in SQL.
      userDF.createOrReplaceTempView("user")

      // Daily counts per behavior type:
      //   2 = favorite, 3 = add to cart, 4 = purchase
      // (behavior_type = 1, presumably page views, is intentionally
      // not counted here — confirm against the data dictionary.)
      val query =
        """
          |SELECT
          |DATE(time) as date,
          |count(case when behavior_type=3 then 1 end) as add_cart,
          |count(case when behavior_type=2 then 1 end) as favorite,
          |count(case when behavior_type=4 then 1 end) as purchase
          |from user
          |GROUP BY date
          |ORDER BY date
          |""".stripMargin
      val df = spark.sql(query)
      df.show(false)

      // Persist the result; SaveMode.Overwrite drops and recreates the
      // target table `result8` on every run.
      df.write
        .format("jdbc")
        .options(jdbcOptions)
        .option("dbtable", "result8")
        .mode(SaveMode.Overwrite)
        .save()
    } finally {
      // stop() releases the driver and cluster resources.
      spark.stop()
    }
  }
}
