package etl

import java.util.Properties

import config.ConfigHelper
import org.apache.spark.sql.SparkSession

/**
  * Extracts the `User_charge` table from the MySQL database and writes it
  * to a directory on HDFS (e.g. /charge_info) in Parquet format.
  *
  * Connection details (URL, user, password) and the HDFS target path all
  * come from [[config.ConfigHelper]]; nothing is hardcoded here.
  */
object Mysql2Hdfs {

  def main(args: Array[String]): Unit = {
    // NOTE(review): removed the previous System.setProperty("user"/"password")
    // calls that embedded the credentials "root"/"123456" in source — they were
    // never read (the JDBC call below takes credentials from ConfigHelper), and
    // hardcoding secrets is a security risk.

    val session = SparkSession.builder()
      .master("local[*]")
      .appName("充值数据从mysql到hdfs")
      .getOrCreate()

    try {
      // JDBC credentials are supplied via a Properties object, sourced from
      // external configuration rather than literals.
      val property = new Properties()
      property.setProperty("user", ConfigHelper.mysql_name)
      property.setProperty("password", ConfigHelper.mysql_password)

      // Read the whole User_charge table and persist it to HDFS as Parquet.
      val frame = session.read.jdbc(ConfigHelper.mysql_url, "User_charge", property)
      frame.write.parquet(ConfigHelper.hdfsPath)
    } finally {
      // Always release the SparkSession, even if the read or write fails;
      // previously a failure would leak the session.
      session.stop()
    }
  }

}
