package com.bijie.ent_huaxiang

import java.util.Properties

import org.apache.spark.sql.{Dataset, Encoders, SaveMode, SparkSession}

object bijie_company_year_total {
    def main(args: Array[String]): Unit = {
        val spark: SparkSession = SparkSession
          .builder()
          .master("local[*]")
          .appName("JDBCRead")
          .getOrCreate()


        // 专用的读
        val url = "jdbc:mysql://192.168.1.90:3306/cy_analysis_data"
        val table1 = "company_base_info"
        val table2 = "company_year_total"
        //company_name|turn_over|total_assets|production_value|time|identification|


        val props: Properties = new Properties()
        props.setProperty("user", "root")
        props.setProperty("password", "bigData@123")

        import spark.implicits._

        spark.read.jdbc(url, table1, props).createOrReplaceTempView("t1")

        spark.sql(
            """
              |select
              | company_name,
              | identification
              |from t1
              |
              |
            """.stripMargin).show()//.createOrReplaceTempView("t2") //todo 三千企业 三千id

        //2015
//        spark.sql(
//            """
//              |select
//              |  company_name,
//              |  500000+ round(rand()*10) as turn_over,
//              |  500000+round(rand()*10)  as total_assets,
//              |  500000+round(rand()*10)  as production_value,
//              |  '2015' time,
//              |  identification
//              |from t2
//              |
//              |
//            """.stripMargin).createOrReplaceTempView("t11")
//
//        //2016
//        spark.sql(
//            """
//              |select
//              |  company_name,
//              |  500000+ round(rand()*10) as turn_over,
//              |  500000+round(rand()*10)  as total_assets,
//              |  500000+round(rand()*10)  as production_value,
//              |  '2016' time,
//              |  identification
//              |from t2
//              |
//              |
//            """.stripMargin).createOrReplaceTempView("t12")
//
//
//        //2017
//        spark.sql(
//            """
//              |select
//              |  company_name,
//              |  500000+ round(rand()*10) as turn_over,
//              |  500000+round(rand()*10)  as total_assets,
//              |  500000+round(rand()*10)  as production_value,
//              |  '2017' time,
//              |  identification
//              |from t2
//              |
//              |
//            """.stripMargin).createOrReplaceTempView("t13")
//
//        //2018
//        spark.sql(
//            """
//              |select
//              |  company_name,
//              |  500000+ round(rand()*10) as turn_over,
//              |  500000+round(rand()*10)  as total_assets,
//              |  500000+round(rand()*10)  as production_value,
//              |  '2018' time,
//              |  identification
//              |from t2
//              |
//              |
//            """.stripMargin).createOrReplaceTempView("t14")
//
//
//        //2019
//        spark.sql(
//            """
//              |select
//              |  company_name,
//              |  500000+ round(rand()*10) as turn_over,
//              |  500000+round(rand()*10)  as total_assets,
//              |  500000+round(rand()*10)  as production_value,
//              |  '2019' time,
//              |  identification
//              |from t2
//              |
//              |
//            """.stripMargin).createOrReplaceTempView("t15")
//
//        //2015~2019
//        val df = spark.sql(
//            """
//select
//  company_name,
//  turn_over,
//  total_assets,
//  production_value,
//  time,
//  identification
//from t11
//
//union all
//select
//  company_name,
//  turn_over,
//  total_assets,
//  production_value,
//  time,
//  identification
//from t12
//
//union all
//select
//  company_name,
//  turn_over,
//  total_assets,
//  production_value,
//  time,
//  identification
//from t13
//
//union all
//select
//  company_name,
//  turn_over,
//  total_assets,
//  production_value,
//  time,
//  identification
//from t14
//
//union all
//select
//  company_name,
//  turn_over,
//  total_assets,
//  production_value,
//  time,
//  identification
//from t15
//
//            """.stripMargin)
//
//
//
//
//
//
//
//
//
//
//        //data.show()
//
//
//
//
//       df.write.mode(SaveMode.Overwrite).jdbc(url, table2, props)
          spark.close()
    }
}
case class User(id:String, age:Int )