package org.example

import org.apache.spark
import org.apache.spark.sql.types.{DoubleType, IntegerType, StringType, StructField, StructType}
import org.apache.spark.sql.{DataFrame, Row, SQLContext, SparkSession}

import java.util.Properties
object SparkSQLToMySQL {

  /**
   * Row shape of `src/main/resources/person.txt` (CSV: id,name,age,height).
   * Uses Scala `Int` rather than boxed `java.lang.Integer` so Spark encoders
   * and arithmetic behave predictably.
   */
  final case class Person(id: Int, name: String, age: Int, height: Double)

  /**
   * Demo entry point: reads the `spark` table from a local MySQL `score`
   * database, shows it, filters it with the DataFrame DSL, and builds a
   * (never-materialised) RDD[Person] from a local text file.
   */
  def main(args: Array[String]): Unit = {
    // 1. Local SparkSession; local[*] uses all available cores.
    val spark = SparkSession
      .builder()
      .master("local[*]")
      .appName("sparkBase")
      .getOrCreate()
    val sc = spark.sparkContext
    import spark.implicits._ // provides the $"col" interpolator used below

    // 2. JDBC connection properties for MySQL.
    // NOTE(review): credentials are hard-coded; move them to configuration or
    // environment variables before using this outside a demo.
    val properties: Properties = new Properties()
    properties.setProperty("user", "root")
    properties.setProperty("password", "123456")
    // NOTE(review): "com.mysql.jdbc.Driver" is the legacy Connector/J 5.x
    // class; Connector/J 8+ renamed it to "com.mysql.cj.jdbc.Driver" —
    // confirm which connector version is on the classpath.
    properties.setProperty("driver", "com.mysql.jdbc.Driver")

    // 3. Read the `spark` table from the MySQL `score` database.
    val mysqlDF = spark.read.jdbc(
      "jdbc:mysql://localhost:3306/score?verifyServerCertificate=false&useSSL=false",
      "spark",
      properties)

    // 4. Display the table contents and the schema Spark inferred over JDBC.
    mysqlDF.show()
    mysqlDF.printSchema()

    // A DataFrame can be queried either with SQL or with the column-expression
    // DSL; the DSL style is demonstrated here.
    println("3. 以lambda(DSL) 过滤:")
    val filtered = mysqlDF.filter($"id" >= 2)
    filtered.show()

    // 5. Parse person.txt into an RDD[Person]. RDD transformations are lazy
    // and no action is invoked on personRDD below, so the file is never
    // actually read — this is left as scaffolding for the DataFrame demos.
    // (Previously a deprecated `new SQLContext(sc)` was built here; use
    // `spark.sqlContext` or the SparkSession itself if one is ever needed.)
    val lines = sc.textFile("src/main/resources/person.txt")
    val personRDD = lines.map { line =>
      // Tolerates trailing extra columns: only the first four fields are read.
      val fields = line.split(",")
      Person(fields(0).toInt, fields(1), fields(2).toInt, fields(3).toDouble)
    }

    spark.stop()
  }
}
