package com.bigdata.topN



import org.apache.spark.sql.{DataFrame, DataFrameNaFunctions, SparkSession}

object GetDatas {
  /** Format identifier for Spark's JDBC data source. */
  val JDBC_SOURCE = "jdbc"

  /**
   * Entry point: snapshots each MySQL source table into HDFS storage.
   *
   * For every table it loads the data over JDBC (via [[getDF]]) and writes it
   * out as a single file under `HADOOP_STORAGE/<time>-<table>`. The article
   * table is additionally printed to stdout, matching the original behavior.
   */
  def main(args: Array[String]): Unit = {
    val session = SparkSession.builder().appName("RegistryData").master("local").getOrCreate()

    // Tables to snapshot, in the same write order as before:
    // article, browse, user, comment, support, tag.
    val tables = Seq(
      Constants.TABLE_ARTICLE,
      Constants.TABLE_BROWSE,
      Constants.TABLE_USER,
      Constants.TABLE_COMMENT,
      Constants.TABLE_SUPPORT,
      Constants.TABLE_TAG
    )

    tables.foreach { tbName =>
      // Reuse the shared JDBC loader instead of repeating the option chain
      // once per table (the original duplicated it six times).
      val df = getDF(tbName, session)
      if (tbName == Constants.TABLE_ARTICLE) df.show()
      saveDF(df, tbName)
    }

    session.stop()
  }

  /**
   * Writes `df` as a single output file (coalesce to one partition) under
   * `HADOOP_STORAGE/<TIME_GENERATOR>-<tbName>`, using Spark's default format.
   */
  private def saveDF(df: DataFrame, tbName: String): Unit =
    df.coalesce(1).write
      .save(Constants.HADOOP_STORAGE + Constants.TIME_GENERATOR + "-" + tbName)

  /**
   * Loads one MySQL table as a DataFrame through Spark's JDBC source.
   *
   * @param tbName  name of the database table to read (`dbtable` option)
   * @param session active SparkSession used to perform the read
   * @return the table contents as a DataFrame
   */
  def getDF(tbName: String, session: SparkSession): DataFrame =
    session.read.format(JDBC_SOURCE)
      .option("url", Constants.JDBC_URL)
      .option("dbtable", tbName)
      .option("user", Constants.JDBC_USER)
      // NOTE(review): the password is set to JDBC_USER, exactly as in the
      // original code — this looks like it should be a JDBC_PASSWORD
      // constant; confirm against the Constants object before changing.
      .option("password", Constants.JDBC_USER)
      .option("driver", Constants.JDBC_DRIVER)
      .load()
}
