package com.d

import java.util.Properties

import org.apache.spark.sql.{DataFrame, SparkSession}

import scala.collection.immutable

/**
 * One-off export job: reads a MySQL table over JDBC and writes it to local
 * disk as header-ed CSV files partitioned by `id`.
 */
object sparkEnv {
  def main(args: Array[String]): Unit = {
    // Local single-threaded Spark session for this ad-hoc job.
    val session = SparkSession.builder.appName("non").master("local").getOrCreate()
    import session.implicits._
    session.sparkContext.setLogLevel("ERROR")

    // JDBC source. NOTE(review): credentials are hard-coded in source —
    // move them to configuration / environment before sharing this code.
    val url = "jdbc:mysql://47.107.249.203:3306/test_liu"
    val tableName = "person_dahongmen"
    val prop = new Properties
    prop.setProperty("user", "root")
    prop.setProperty("password", "wxit2020")

    // Load the table, register it as a temp view, and re-select it via SQL.
    val frame: DataFrame = session.read.jdbc(url, tableName, prop)
    frame.createOrReplaceTempView("map")
    val frameSql: DataFrame = session.sql("select * from map")

    // Write partitioned CSV with a header row. The original also called
    // .format("com.databricks.spark.csv"), but .csv(path) overrides the
    // format with the built-in CSV source, so that call was dead code and
    // has been removed. NOTE(review): partitionBy("id") creates one output
    // directory per distinct id — confirm `id` is low-cardinality, otherwise
    // this explodes into one directory per row.
    frameSql.write.partitionBy("id")
      .option("header", "true")
      .csv("C:\\Users\\Raichard\\Desktop\\a/b")
    println("完成！")

    // Release cluster/driver resources; the original never stopped the session.
    session.stop()
  }
}
