package com.galeno.sparksql

import org.apache.spark.sql.types.{DataTypes, DateType, StructField, StructType}
import org.apache.spark.sql.{DataFrame, Row, SparkSession}

import java.util.Properties

/**
 * @Title: C03
 * @Description: Spark SQL demo — builds a DataFrame from an RDD plus an
 *               explicit schema, reads a MySQL table over JDBC, and runs a
 *               SQL query against a Hive table.
 * @author galeno
 * @date 2021/9/4 14:46
 */
object C03 {
  def main(args: Array[String]): Unit = {
    // Local SparkSession with Hive support so spark.sql(...) can query Hive tables.
    // appName fixed to match this object (was "C02" — copy-paste slip).
    val spark = SparkSession.builder()
      .master("local")
      .enableHiveSupport()
      .appName("C03")
      .getOrCreate()
    val sc = spark.sparkContext

    // Sample rows and their schema. Previously these were built but never used;
    // wiring them into createDataFrame completes the RDD-to-DataFrame demo.
    val rdd5 = sc.makeRDD(Seq(
      Row(1, "zs", 18),
      Row(1, "zs", 18),
      Row(1, "zs", 18),
      Row(1, "zs", 18),
      Row(1, "zs", 18)
    ))
    val structType: StructType = StructType(Seq(
      StructField("id", DataTypes.IntegerType),
      StructField("name", DataTypes.StringType),
      StructField("age", DataTypes.IntegerType)
    ))
    val df0: DataFrame = spark.createDataFrame(rdd5, structType)
    df0.show()

    // Read a table from MySQL over JDBC.
    // NOTE(review): credentials are hard-coded for the demo — in real code load
    // them from configuration, never from source.
    val properties = new Properties()
    properties.setProperty("user", "root")
    properties.setProperty("password", "root")
    val df1: DataFrame = spark.read.jdbc("jdbc:mysql://localhost:3306/spark", "battel", properties)
    df1.printSchema()
    df1.show(100, false)

    // Query a Hive table. The original query had an empty select list
    // ("select ... from stu" with no columns), which fails to parse at runtime.
    spark.sql(
      """
        |select *
        |from stu
        |""".stripMargin).show()

    // Release the session's resources explicitly.
    spark.stop()
  }

}
