package cn.itcast.edu.analysis.batch

import cn.itcast.edu.bean.AnswerWithRecommendations
import org.apache.spark.SparkContext
import org.apache.spark.sql.{Dataset, SaveMode, SparkSession}

/**
 * Batch-analysis entry point: reads the `t_recommended` table from MySQL into a
 * typed Dataset for downstream SQL/DSL aggregation (aggregation steps still TODO).
 */
object BatchAnalysis {
  def main(args: Array[String]): Unit = {
    // TODO 0: prepare the Spark environment
    val spark: SparkSession = SparkSession.builder().appName("sparksql").master("local[*]")
      // Shrink shuffle partitions for local testing; tune for cluster size in
      // production (Spark's default is 200).
      .config("spark.sql.shuffle.partitions", "4")
      .getOrCreate()
    val sc: SparkContext = spark.sparkContext
    sc.setLogLevel("WARN")
    import spark.implicits._ // needed for .as[AnswerWithRecommendations] below

    try {
      // TODO: load source data from MySQL
      // NOTE(review): credentials are hardcoded — move user/password to
      // configuration (e.g. args or a properties file) before production use.
      val properties = new java.util.Properties()
      properties.setProperty("user", "root")
      properties.setProperty("password", "root")
      val allInfoDS: Dataset[AnswerWithRecommendations] =
        spark.read
          .jdbc(
            "jdbc:mysql://localhost:3306/gubanjie?useUnicode=true&characterEncoding=utf8",
            "t_recommended",
            properties
          )
          .as[AnswerWithRecommendations]

      // TODO: process the data
      // TODO: SQL / DSL aggregations go here

    } finally {
      // Release the session/context even if processing fails — the original
      // code leaked the SparkSession by never calling stop().
      spark.stop()
    }
  }
}
