package chapter14

import java.sql.{Connection, DriverManager, PreparedStatement}

import scala.util.control.NonFatal

import org.apache.spark.sql.types.{StringType, StructType}
import org.apache.spark.sql.{Row, SparkSession}

/**
 * author: yuhui
 * description: computes daily page views (PV) from a click log and writes the result to MySQL
 * date: 2024-10-26 3:50 PM
 *
 * drop table view_pv;
 * CREATE TABLE view_pv(
 * dt char(20) not null primary key,
 * pv int
 * )ENGINE=InnoDB DEFAULT CHARSET=utf8;
 *
 * SELECT * from view_pv;
 *
 */

object DataViewPV {

  /**
   * Entry point: reads a comma-separated click log ("dt,name,url"), aggregates
   * page views (PV) per day with Spark SQL, and inserts the result into the
   * MySQL table `view_pv` (see the DDL in the file header).
   */
  def main(args: Array[String]): Unit = {

    val spark: SparkSession = SparkSession
      .builder()
      .appName("DataViewPV")
      .master("local[*]")
      .getOrCreate()

    // Schema of the raw log: date, user name, visited URL.
    val schema = new StructType()
      .add("dt", StringType)
      .add("name", StringType)
      .add("url", StringType)

    val lines = spark.sparkContext.textFile("doc/output_buffered.txt")

    // Attach the schema to the RDD (still an RDD until createDataFrame).
    // Guard against malformed lines: the original indexed fields 0-2
    // unconditionally, so a short line would throw and kill the stage.
    val rows = lines
      .map(_.split(","))
      .filter(_.length >= 3)
      .map(fields => Row(fields(0), fields(1), fields(2)))

    val df = spark.createDataFrame(rows, schema)

    df.show()

    // createOrReplaceTempView: safe to re-run within the same session.
    df.createOrReplaceTempView("dataTable")

    // Daily page views: one row per dt with the record count cast to int
    // so the JDBC sink can read it with getAs[Int].
    val view_pv = spark.sql(
      """
        |
        | select
        |    dt ,
        |    cast(count(name) as int) as pv
        | from dataTable group by dt
        |
        |""".stripMargin)

    // Persist the aggregate to MySQL.
    // foreachPartition opens ONE connection per partition instead of one per
    // row (the original foreach reconnected for every record). The statement
    // and connection are both closed in finally blocks so nothing leaks.
    // NOTE(review): credentials are hard-coded — move to configuration.
    view_pv.rdd.foreachPartition { partition =>
      val connection: Connection =
        DriverManager.getConnection("jdbc:mysql://localhost:3306/ngn", "root", "123456")
      try {
        val sql = "insert into view_pv(dt,pv) values(?,?)"
        val ps: PreparedStatement = connection.prepareStatement(sql)
        try {
          partition.foreach { line =>
            // Log-and-continue per row (matches the original behaviour):
            // e.g. a duplicate primary key on dt must not abort the partition.
            try {
              ps.setString(1, line.getAs[String](0))
              ps.setInt(2, line.getAs[Int](1))
              ps.execute()
            } catch {
              case NonFatal(e) => e.printStackTrace()
            }
          }
        } finally {
          ps.close()
        }
      } finally {
        connection.close()
      }
    }

    spark.stop()
  }
}