package com.software.process

import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkConf
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
 * Reads the `tbl_comment_process_result` table from a local MySQL database
 * over JDBC, registers it as a temp view, and prints the comment count per
 * name to stdout.
 *
 * Side effects: silences Spark's "org" logger to ERROR at object load time;
 * creates (and stops) a local SparkSession; prints the query result.
 */
object ReadData {
  Logger.getLogger("org").setLevel(Level.ERROR)

  def main(args: Array[String]): Unit = {
    // 1. Spark environment configuration (local single-threaded master).
    val conf = new SparkConf()
      .setAppName("SparkSqlWriteDataExample")
      .setMaster("local")
      .set("spark.testing.memory", "2147480000")

    // 2. Entry point for Spark SQL.
    val spark: SparkSession = SparkSession.builder().config(conf).getOrCreate()

    try {
      // 3. Load the source table via JDBC.
      // SECURITY NOTE(review): credentials are hard-coded in source; move them
      // to configuration / environment variables before sharing or deploying.
      val commentsDf: DataFrame = spark.read.format("jdbc")
        .option("url", "jdbc:mysql://localhost:3306/musicdb?serverTimezone=GMT%2B8")
        .option("driver", "com.mysql.cj.jdbc.Driver")
        .option("user", "root")
        .option("password", "lyf20020511")
        .option("dbtable", "tbl_comment_process_result")
        .load()

      // 4. Expose the DataFrame to SQL and aggregate: rows per name.
      commentsDf.createOrReplaceTempView("testtable")
      val result = spark.sql(
        """
          |select count(*) as cnt, name from testtable group by name
          |""".stripMargin)

      result.show()
    } finally {
      // Release the SparkContext even if the JDBC read or query fails.
      spark.stop()
    }
  }
}
