package com.weic.spark.scala.hw

import java.util.Properties

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession

/**
 * @Author: BigData-weic
 * @ClassName: _03WorkSparkSql
 * @Date: 2020/12/14 21:05
 * @Description: Reads tab-separated (course, name, score) records, computes the
 *               top-3 scores per course with a SQL window function, prints the
 *               result, and writes it to a MySQL table via JDBC.
 * @Version: 1.0
 */
/**
 * Spark SQL top-N job: for each course, keep the three highest-scoring
 * students and persist the result to MySQL.
 */
object _03WorkSparkSql {
	def main(args: Array[String]): Unit = {
		val spark = SparkSession.builder()
    		.appName("_03WorkSparkSql")
    		.master("local[*]")
    		.getOrCreate()

		import spark.implicits._
		val ssc = spark.sparkContext

		// Fix: the original path "file:\F:\..." used backslashes, which is not
		// a valid file URI; forward slashes work on Windows as well.
		val lines: RDD[String] = ssc.textFile("file:///F:/datas/work12_14/topn.txt")

		// Each line is expected to be: course \t name \t score.
		// Drop malformed lines (fewer than 3 fields) instead of crashing with
		// ArrayIndexOutOfBoundsException on dirty input.
		val df = lines.map(_.split("\t"))
			.filter(_.length >= 3)
			.map(paras => (paras(0), paras(1), paras(2).trim().toInt))
			.toDF("course", "name", "score")
		df.createOrReplaceTempView("student")

		// Rank students within each course by score (descending) and keep the top 3.
		val sql =
			"""
			  |select *
			  |from(
			  |  select
			  |  course,name,score,
			  |  row_number() over(partition by course order by score desc) rank
			  |from student) tmp
			  |where tmp.rank <= 3
			  |""".stripMargin

		// Fix: run the query once and reuse the DataFrame — the original called
		// spark.sql(sql) twice, recomputing the whole job for the JDBC write.
		val topN = spark.sql(sql)
		topN.show()

		val url = "jdbc:mysql://localhost:3306/sparksql_01"
		val table = "student_01"
		val properties = new Properties()
		// NOTE(review): credentials are hard-coded; move to args or a config file.
		properties.put("user", "root")
		properties.put("password", "root")
		// Fix: default save mode is ErrorIfExists, which fails on every rerun;
		// "overwrite" makes the job idempotent.
		topN.write.mode("overwrite").jdbc(url, table, properties)
		spark.stop()
	}

}
