package com.bijie.ent_huaxiang

import java.util.Properties

import org.apache.spark.sql.{SaveMode, SparkSession}

object bijie_social_security_to_accmulation_fund_info {

    /** Reads company names and identifications from the MySQL table
      * `company_base_info`, fabricates random social-security / accumulation-fund
      * metrics for each company for the years 2015–2019, and appends the result
      * to `social_security_to_accmulation_fund_info` via JDBC.
      *
      * Output columns: id (constant 1 per row — presumably overwritten by an
      * auto-increment column on the MySQL side; TODO confirm), company_name,
      * social_sum, accumulation_sum, person_total, time (year string),
      * identification.
      */
    def main(args: Array[String]): Unit = {
        val spark: SparkSession = SparkSession
          .builder()
          .master("local[*]") // NOTE(review): hard-coded local master; prefer passing via spark-submit
          .appName("JDBCRead")
          .getOrCreate()

        // JDBC endpoints for the read (company_base_info) and the write target.
        val url = "jdbc:mysql://192.168.1.90:3306/cy_analysis_data"
        val table1 = "company_base_info"
        val table2 = "social_security_to_accmulation_fund_info"

        // SECURITY: credentials are hard-coded in source; move to configuration
        // or environment variables before any real deployment.
        val props: Properties = new Properties()
        props.setProperty("user", "root")
        props.setProperty("password", "bigData@123")

        // Base view: one row per company (name + identification).
        spark.read.jdbc(url, table1, props).createOrReplaceTempView("t1")
        spark.sql(
            """
              |select
              | company_name,
              | identification
              |from t1
            """.stripMargin).createOrReplaceTempView("t2")

        // One DataFrame per year with randomized metrics. The original code
        // copy-pasted the same query five times (views t11..t15) and hand-wrote
        // a five-way `union all`; the queries are identical except for the year
        // literal, so generate them in a loop and fold with `union`.
        val years = Seq("2015", "2016", "2017", "2018", "2019")
        val perYear = years.map { y =>
            spark.sql(
                s"""
                   | select
                   |company_name,
                   |500+round(rand()*5)*99 as social_sum,
                   |500+round(rand()*5)*99 as accumulation_sum,
                   |500+round(rand()*5)*99 as person_total,
                   |'$y' time,
                   |identification
                   | from t2
                 """.stripMargin)
        }

        // Union of all years, exposed as t20 (same view name as the original).
        perYear.reduce(_ union _).createOrReplaceTempView("t20")

        // Final projection: prepend the constant id column expected by table2.
        // (The original also assigned `df` and then had a dead `df` statement,
        // which is removed here.)
        val df = spark.sql(
            """
              |select
              | 1 id,
              | company_name,
              | social_sum,
              | accumulation_sum,
              | person_total,
              | time,
              | identification
              |from t20
            """.stripMargin)

        df.write.mode(SaveMode.Append).jdbc(url, table2, props)
        spark.close()
    }
}
