package site.yunnong.atvris.recommend.offline.spark.featureing

import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.functions.col
import org.apache.spark.sql.{Dataset, Row, SQLContext, SaveMode, SparkSession}
import org.apache.spark.{SparkConf, SparkContext}
import site.yunnong.atvris.common.enumeration.{MediaStatusEnum, MediaTypeEnum}

import java.io.{BufferedWriter, File, FileWriter}
import java.util.Properties


/**
 *
 *
 * @author zjh
 * @date 2021/9/16 10:01
 */
object ScalaBuildData {

  /**
   * Reads all approved videos from the MySQL `video` table via Spark JDBC.
   *
   * Only rows whose `status` equals [[MediaStatusEnum.APPROVED]] are kept.
   * Also prints the resulting DataFrame to stdout as a side effect.
   *
   * @param sparkSession active [[SparkSession]] used to open the JDBC reader
   * @return a [[Dataset]] of approved video rows
   */
  def readMySQL(sparkSession: SparkSession): Dataset[Row] = {
    // jdbc.url=jdbc:mysql://localhost:3306/database
    val url = "jdbc:mysql://localhost:3306/yunnong?useUnicode=true&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai&allowMultiQueries=true"
    // Table to read.
    val table = "video"
    // JDBC connection properties: driver class, user and password.
    // SECURITY NOTE(review): credentials are hard-coded in source; move them to
    // configuration / environment variables before this leaves a dev machine.
    val connectionProperties = new Properties
    connectionProperties.put("driver", "com.mysql.cj.jdbc.Driver")
    connectionProperties.put("user", "root")
    connectionProperties.put("password", "zjhzjh802200")

    System.out.println("读取yunnong数据库中的表内容video")
    // Read the table and filter to approved media only. The redundant
    // select("*") from the original version has been dropped (it was a no-op).
    val jdbcDF = sparkSession.read.jdbc(url, table, connectionProperties)
      .where("status = " + MediaStatusEnum.APPROVED.getValue)
    // Show a sample of the data for debugging.
    jdbcDF.show()
    jdbcDF
  }

  /**
   * Collects the dataset to the driver and writes one `Row.toString` per line
   * to a file under the classpath resource directory `/realdata/`.
   *
   * NOTE(review): despite its name this method does NOT write to MySQL — it
   * writes a local file. The name is kept for source compatibility with
   * existing callers. The output is Row.toString lines, not real CSV, even
   * when the target filename ends in `.csv`.
   *
   * @param df             dataset to dump; fully collected into driver memory,
   *                       so only suitable for small result sets
   * @param outputFilename file name appended to the `/realdata/` resource path
   */
  def writeMySQL(df: Dataset[Row], outputFilename: String) = {
    df.show(20)

    // NOTE(review): getResource returns null when /realdata/ is missing from
    // the classpath — this would NPE below. Confirm the directory is packaged.
    val embFolderPath = this.getClass.getResource("/realdata/")
    val file = new File(embFolderPath.getPath + outputFilename)
    val bw = new BufferedWriter(new FileWriter(file))
    // try/finally ensures the writer is closed (and buffered data flushed)
    // even if collect() or a write fails — the original leaked the handle.
    try {
      df.collect().foreach(video => {
        println(video)
        bw.write(video + "\n")
      })
    } finally {
      bw.close()
    }
  }

  /**
   * Entry point: runs a local Spark job that reads approved videos from MySQL
   * and dumps them to `videos.csv` in the `/realdata/` resource directory.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    // Silence Spark's verbose INFO logging.
    Logger.getLogger("org").setLevel(Level.ERROR)

    // Single-partition local run; app name "mysql" shows up in the Spark UI.
    val conf = new SparkConf()
      .setMaster("local")
      .setAppName("mysql")
      .set("spark.sql.shuffle.partitions", "1")

    val spark = SparkSession.builder().config(conf).getOrCreate()

    // Load the approved videos, then dump them to a local file.
    val videos = readMySQL(spark)
    writeMySQL(videos, "videos.csv")

    // Release the SparkContext and its resources.
    spark.stop()
  }

}
