package com.bijie.ent_huaxiang

import java.util.Properties

import org.apache.spark.sql.{SaveMode, SparkSession}

object bijie_position_person_inifo {

    /**
      * Reads every (company_name, identification) pair from MySQL table
      * `company_base_info`, fabricates one row of random headcount statistics
      * per company for each of five fixed positions, and appends the result
      * to MySQL table `position_person_inifo`.
      *
      * Output columns: id, company_name, identification, position,
      * person_total, local_place, out_place, talents_into,
      * college_graduates, alter_time.
      */
    def main(args: Array[String]): Unit = {
        val spark: SparkSession = SparkSession
          .builder()
          .master("local[*]")
          .appName("JDBCRead")
          .getOrCreate()

        // JDBC endpoints: read companies from table1-style source, append to target.
        val url = "jdbc:mysql://192.168.1.90:3306/cy_analysis_data"
        val sourceTable = "company_base_info"
        val targetTable = "position_person_inifo"

        val props: Properties = new Properties()
        props.setProperty("user", "root")
        // NOTE(review): hard-coded credentials — should come from config/env, not source.
        props.setProperty("password", "bigData@123")

        spark.read.jdbc(url, sourceTable, props).createOrReplaceTempView("t1")

        // Company name + id pairs (~3000 companies per original note).
        spark.sql(
            """
              |select
              | company_name,
              | identification
              |from t1
            """.stripMargin).createOrReplaceTempView("t2")

        // One randomized statistics row per (company, position). The original
        // code duplicated this query five times (labelled 2015..2019, although
        // no year column was ever produced) and hand-wrote a five-way
        // "union all"; a single parameterized query expresses the same thing.
        val positions = Seq("普工", "工程师", "程序员", "qa", "总监")

        def statsFor(position: String) = spark.sql(
            s"""
               |select
               |  1 id,
               |  company_name,
               |  identification,
               |  '$position' position,
               |  cast(round(rand()*9) as decimal(4,0)) as person_total,
               |  cast(round(rand()*9) as decimal(4,0)) as local_place,
               |  cast(round(rand()*9) as decimal(4,0)) as out_place,
               |  cast(round(rand()*9) as decimal(4,0)) as talents_into,
               |  cast(round(rand()*9) as decimal(4,0)) as college_graduates,
               |  current_date() as alter_time
               |from t2
             """.stripMargin)

        // Same rows/schema as the original t11..t15 "union all" chain.
        val df = positions.map(statsFor).reduce(_ union _)

        // Append, never Overwrite: Overwrite with a mismatched schema would
        // recreate the MySQL table and drop its column comments.
        df.write.mode(SaveMode.Append).jdbc(url, targetTable, props)
        spark.close()
    }
}