package ds_recommended.self_chuli

import org.apache.spark.sql.SparkSession

import java.util.Properties

object order_he_shop {

  /** Reads `shop_info` and `order_info` from MySQL over JDBC, joins them on
    * `product_id`, previews the result, and writes the joined rows back to
    * the `user_and_shop` table (overwriting any existing data).
    */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("aa")
      .getOrCreate()

    // NOTE(review): credentials are hard-coded; acceptable for a local
    // experiment only — move to config/env before sharing or deploying.
    val connect = new Properties()
    connect.setProperty("user", "root")
    connect.setProperty("password", "123456")
    // NOTE(review): com.mysql.jdbc.Driver is the legacy (pre-8.0) class name;
    // if MySQL Connector/J 8.x is on the classpath, use com.mysql.cj.jdbc.Driver.
    connect.setProperty("driver", "com.mysql.jdbc.Driver")

    // Single source of truth for the JDBC URL (was repeated three times).
    val jdbcUrl = "jdbc:mysql://192.168.67.193:3307/ods?useSSL=false"

    spark.read.jdbc(jdbcUrl, "shop_info", connect)
      .createOrReplaceTempView("shop")

    // View renamed from "order" to "orders": ORDER is a SQL keyword, so
    // `FROM order` fails to parse in Spark SQL unless backtick-quoted.
    spark.read.jdbc(jdbcUrl, "order_info", connect)
      .createOrReplaceTempView("orders")

    val result = spark.sql(
      """
        |select
        |o.*, s.class_name, s.title, s.price
        |from orders as o
        |join shop as s
        |on s.product_id = o.product_id
        |""".stripMargin)

    // Quick sanity preview before writing.
    result.show(5)

    result.write.mode("overwrite")
      .option("batchsize", "100000") // up to 100000 rows per JDBC batch (comment previously said 1000)
      .option("isolationLevel", "NONE")
      .option("autocommit", "false")
      .jdbc(jdbcUrl, "user_and_shop", connect)

    spark.close()
  }

}
