package com.software.util

import org.apache.spark.SparkConf
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

/**
 * 读取HBase数据源的工具类
 */
/**
 * Utility object for Spark session management, reading data sources,
 * and writing analysis results to MySQL.
 */
object DBTools {
  /**
   * Creates (or reuses) a SparkSession configured with the caller's settings.
   *
   * @param appname application name shown in the Spark UI
   * @param master  master URL, e.g. "local[*]" or "spark://host:port"
   * @return the shared SparkSession instance
   */
  def getSession(appname:String,master:String) :SparkSession= {
    // Bug fix: the original ignored both parameters and hard-coded
    // appName("ColumnMean") / master("local"); the caller's values are now used.
    val spark = SparkSession.builder()
      .appName(appname)
      .master(master)
      // Workaround for Spark's minimum-memory check when testing on small machines.
      .config("spark.testing.memory", "2147480000")
      .getOrCreate()
    spark
  }

  /**
   * Reads a data source into a DataFrame.
   *
   * @param filetype data source format, e.g. "csv" or "json"
   * @param path     input path (file or directory)
   * @param spark    active SparkSession used to perform the read
   * @return the loaded DataFrame
   */
  def redFile(filetype:String,path:String,spark: SparkSession):DataFrame  ={
    val airData: DataFrame = spark.read.format(filetype)
      .option("header", true)    // treat the first line as column names
      .option("multiLine", true) // allow records spanning multiple lines
      .load(path)
    airData
  }

  /**
   * Writes an analysis result DataFrame into a MySQL table via JDBC.
   *
   * SECURITY NOTE(review): the JDBC URL, user and password are hard-coded in
   * source. Move them to configuration / environment variables before this
   * code ships; credentials must not live in version control.
   *
   * @param tableName target table name in the AirDB database
   * @param result    DataFrame holding the rows to persist
   * @param saveMode  Spark SaveMode (e.g. Append, Overwrite)
   */
  def WriteMySql(tableName:String,result:DataFrame,saveMode: SaveMode): Unit ={
    result.write
      .format("jdbc")
      .option("url","jdbc:mysql://localhost:3306/AirDB?serverTimezone=GMT%2B8")
      .option("driver","com.mysql.cj.jdbc.Driver")
      .option("user","root")
      .option("password","lyf20020511")
      .option("dbtable",tableName)
      .mode(saveMode)
      .save()
  }
}
