package com.bijie.ent_huaxiang

import java.util.Properties

import org.apache.spark.sql.{SaveMode, SparkSession}

/**
  * Purpose: correct the associated enterprise id (`ent_id`) on the `position` table.
  */

object bijie_position {
    /**
      * Entry point. Re-keys job-posting rows with the company `identification`
      * taken from `company_base_info`, then appends the result into `position`.
      *
      * Flow:
      *   1. Read `company_base_info` (t1) and `position_cp2` (t2) over JDBC.
      *   2. Project (company_name, identification) from t1 → view t1_1.
      *   3. Left-join t2 to t1_1 on ent_name = company_name → view t3, so every
      *      posting keeps its row even when the company has no match.
      *   4. Replace a null identification with a randomly generated ent_id and
      *      append the final rows into `table3`.
      *
      * @param args unused command-line arguments
      */
    def main(args: Array[String]): Unit = {
        val spark: SparkSession = SparkSession
          .builder()
          .master("local[*]")   // local run; override via spark-submit for cluster use
          .appName("JDBCRead")
          .getOrCreate()

        // Dedicated read/write connection settings.
        // NOTE(review): credentials are hard-coded; move to config/env before production use.
        val url = "jdbc:mysql://192.168.1.90:3306/cy_analysis_data"
        val table1 = "company_base_info"
        val table2 = "position_cp2"
        val table3 = "position"  // TODO: verify this target table name has not changed

        val props: Properties = new Properties()
        props.setProperty("user", "root")
        props.setProperty("password", "bigData@123")

        // (Removed unused `import spark.implicits._` — no $-columns, toDF or toDS are used.)

        spark.read.jdbc(url, table1, props).createOrReplaceTempView("t1")
        spark.read.jdbc(url, table2, props).createOrReplaceTempView("t2")

        // t1_1: one (company_name, identification) pair per company.
        // TODO: roughly three thousand companies / three thousand ids expected.
        spark.sql(
            """
              |select
              | company_name,
              | identification
              |from t1
              |
              |
            """.stripMargin).createOrReplaceTempView("t1_1")
        //-----------------------------------------------------------------------------------------------

        // t3: every posting from t2, with the company's identification attached
        // (null when ent_name found no matching company_name).
        spark.sql(
            """
              |
              |select
              |  t2.id
              |, t1_1.identification
              |, t2.ent_name
              |, t2.name
              |, t2.function_type
              |, t2.posts
              |, t2.sex
              |, t2.experience
              |, t2.education
              |, t2.salary
              |, t2.age_range
              |, t2.job_nature
              |, t2.description
              |, t2.keyword
              |, t2.keyword_two
              |, t2.keyword_three
              |, t2.province
              |, t2.province_code
              |, t2.city
              |, t2.city_code
              |, t2.area
              |, t2.area_code
              |, t2.street
              |, t2.street_code
              |, t2.address
              |, t2.latitude
              |, t2.longitude
              |, t2.status
              |, t2.views
              |, t2.publish
              |, t2.skill
              |, t2.sys_area_code
              |, t2.batchNumber
              |, t2.age_begin
              |, t2.age_end
              |, t2.salary_begin
              |, t2.salary_end
              |, t2.source
              |from t2  --position
              |left join t1_1
              |on t2.ent_name = t1_1.company_name
              |
            """.stripMargin).createOrReplaceTempView("t3")

        // Final projection: fill a missing identification with a random ent_id.
        // NOTE(review): the random fallback sums four rounded rand() terms, so it
        // produces small integers (max ~11110) that could collide with genuine
        // identification values — confirm the id ranges are disjoint.
        val df = spark.sql(
            """

            select
            id,
            case when identification is null then round(rand()*10)+round(rand()*100)+round(rand()*1000)+round(rand()*10000) else identification end as ent_id,
            ent_name,
            name,
            function_type,
            posts,
            sex,
            experience,
            education,
            salary,
            age_range,
            job_nature,
            description,
            keyword,
            keyword_two,
            keyword_three,
            province,
            province_code,
            city,
            city_code,
            area,
            area_code,
            street,
            street_code,
            address,
            latitude,
            longitude,
            status,
            views,
            publish,
            skill,
            sys_area_code,
            batchNumber,
            age_begin,
            age_end,
            salary_begin,
            salary_end,
            source
            from t3


            """.stripMargin)

        //df.show()

        // Append (never overwrite) the corrected rows into the target table.
        df.write.mode(SaveMode.Append).jdbc(url, table3, props)
        spark.close()
    }
}
