package etl

import java.util.Properties

import config.ConfigHelper
import org.apache.spark.SparkConf
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
  * Imports recharge (top-up) CSV data into a MySQL table named `User_charge`.
  *
  * The first program argument, when present, overrides the input CSV path;
  * otherwise the original default path is used, so existing invocations
  * keep working unchanged. JDBC connection details come from [[config.ConfigHelper]].
  */
object Data2Mysql {
  def main(args: Array[String]): Unit = {

    // Allow the input path to be supplied as args(0); fall back to the
    // original hard-coded path for backward compatibility.
    val inputPath =
      if (args.nonEmpty) args(0)
      else "C:\\Users\\Administrator\\Desktop\\charge_data2.csv"

    val session = SparkSession.builder().master("local[*]").appName("黑卡项目").getOrCreate()
    try {
      // NOTE(review): no header/schema options are set, so every column is read
      // as a string and a header row (if the file had one) would be ingested as
      // data — confirm the file truly has no header and exactly 9 columns,
      // otherwise toDF below will fail on the column-count mismatch.
      val raw: DataFrame = session.read.csv(inputPath)
      val charges = raw.toDF(
        "gameid", "gameserverid", "amount", "userid", "mac",
        "notifytime", "addpoint", "ichannel", "mac_flag")

      // JDBC credentials are sourced from the project-level ConfigHelper.
      val props = new Properties()
      props.setProperty("user", ConfigHelper.mysql_name)
      props.setProperty("password", ConfigHelper.mysql_password)

      // The default SaveMode (ErrorIfExists) is deliberately kept: a rerun
      // against an existing User_charge table fails fast instead of silently
      // duplicating rows.
      charges.write.jdbc(ConfigHelper.mysql_url, "User_charge", props)
    } finally {
      // Release the SparkSession even if the read or the JDBC write throws.
      session.close()
    }
  }

}
