package com.bijie.ent_huaxiang

import java.util.Properties

import org.apache.spark.sql.{SaveMode, SparkSession}



object bijie_year_operator_info {

    /** Years for which synthetic operator metrics are generated. */
    private val Years = Seq("2015", "2016", "2017", "2018", "2019")

    /** SQL producing one year's synthetic metrics for every company in view t2.
      * Each metric is a random value in [50, 150]; alter_time is the year literal.
      * (Replaces five copy-pasted queries that differed only in the year.)
      */
    private def yearMetricsSql(year: String): String =
        s"""
           |select
           |    company_name,
           |    50+round(rand()*100) as talent_demand,
           |    50+round(rand()*100) as companies_active,
           |    50+round(rand()*100) as operational_efficiency,
           |    50+round(rand()*100) as talent_flow,
           |    50+round(rand()*100) as scientific_payoffs,
           |    '$year' alter_time,
           |    identification
           |from t2
         """.stripMargin

    /** Reads the company list from MySQL, fabricates random yearly operator
      * metrics for 2015~2019, and appends the full five-year set to the
      * year_operator_info table.
      */
    def main(args: Array[String]): Unit = {
        val spark: SparkSession = SparkSession
          .builder()
          .master("local[*]")
          .appName("JDBCRead")
          .getOrCreate()

        // JDBC source/target.
        // NOTE(review): credentials are hard-coded; move them to configuration
        // or a secrets store before running outside a dev environment.
        val url = "jdbc:mysql://192.168.1.90:3306/cy_analysis_data"
        val table1 = "company_base_info"
        val table2 = "year_operator_info"

        val props: Properties = new Properties()
        props.setProperty("user", "root")
        props.setProperty("password", "bigData@123")

        try {
            // Company base info read from MySQL; t2 keeps only the two columns
            // the synthetic metrics need (~3000 companies per original comment).
            spark.read.jdbc(url, table1, props).createOrReplaceTempView("t1")

            spark.sql(
                """
                  |select
                  |    company_name,
                  |    identification
                  |from t1
                """.stripMargin).createOrReplaceTempView("t2")

            // One DataFrame of synthetic metrics per year, unioned into a single
            // 2015~2019 set (previously views t11..t15 unioned by hand into t20).
            val allYears = Years.map(y => spark.sql(yearMetricsSql(y))).reduce(_ union _)
            allYears.createOrReplaceTempView("t20")

            // BUG FIX: the final insert previously selected from t15 (2019 only),
            // leaving the t20 union dead code; it now writes all five years.
            // NOTE(review): id stays the constant 1 as in the original —
            // presumably the target table generates its own key; confirm.
            val df = spark.sql(
                """
                  |select
                  |    1 id,
                  |    company_name,
                  |    talent_demand,
                  |    companies_active,
                  |    operational_efficiency,
                  |    talent_flow,
                  |    scientific_payoffs,
                  |    alter_time,
                  |    identification
                  |from t20
                """.stripMargin)

            df.write.mode(SaveMode.Append).jdbc(url, table2, props)
        } finally {
            // Always release the session, even if a query or the write fails.
            spark.close()
        }
    }
}
