package com.bijie.college_graduates

import java.util.Properties

import org.apache.spark.sql.{SaveMode, SparkSession}
import java.sql.{Connection, DriverManager, PreparedStatement}

import org.apache.spark
import org.apache.spark.{SparkConf, SparkContext}


/**
 * One-off ETL job: reads the `college_graduate` table over JDBC, classifies
 * each graduate's employment status (enterprise hire / self-employed / etc.),
 * aggregates counts for current-year ("应届") vs. earlier ("往届") graduates,
 * and writes the result rows into `test.big_screen_result_copy1` via JDBC.
 */
object collegeGraduates {
    def main(args: Array[String]): Unit = {
        // Local-mode session using all available cores; fine for a one-off job.
        val spark: SparkSession = SparkSession
          .builder()
          .master("local[*]")
          .appName("JDBCRead")
          .getOrCreate()

        // Read-side connection settings.
        val url1 = "jdbc:mysql://192.168.1.90:3306/cy_analysis_data"
        val url2 = "jdbc:mysql://192.168.1.90:3306/test"
        val table1 = "college_graduate"
        val table2 = "big_screen_result_copy1" // sink table, written via raw JDBC below
        //val table3 = "position"  //todo note: table name was never switched over

        val props: Properties = new Properties()
        props.setProperty("user", "root")
        props.setProperty("password", "bigData@123")

        // NOTE(review): `table1` (college_graduate) is read from `url2` (the
        // `test` database) while `url1` (cy_analysis_data) is never used —
        // confirm which database actually hosts the source table.
        spark.read.jdbc(url2, table1, props).createOrReplaceTempView("t1")

        // 01: query the source rows. All later queries of `t1` resolve to the
        // session-local temp view registered above; this GLOBAL temp view is
        // only reachable as `global_temp.t1` and is currently unused.
        spark.sql(
            """
              |select
              | *
              |from t1
              |
              |
            """.stripMargin).createGlobalTempView("t1") //todo ~3000 companies / ids
        //-----------------------------------------------------------------------------------------------

        // 02: one row per (graduate, employment category); a graduate appears
        // under every category whose source column is non-null.
        spark.sql(
            """
              |
              |select
              |	name,
              |	'企业招聘' content,
              |	edu_time
              |from t1
              |where enterprise_recruitment is not null
              |
              |union all
              |select
              |	name,
              |	'自主创业' content,
              |	edu_time
              |from t1
              |where autonomy_work is not null
              |
              |union all
              |select
              |	name,
              |	'单位就业' content,
              |	edu_time
              |from t1
              |where company_work is not null
              |
              |union all
              |select
              |	name,
              |	'特殊岗位就业' content,
              |	edu_time
              |from t1
              |where special_position_work is not null
              |
              |union all
              |select
              |	name,
              |	'未就业' content,
              |	edu_time
              |from t1
              |where unemploy_type is not null
              |
              |union all
              |select
              |	name,
              |	'其他' content,
              |	edu_time
              |from t1
              |where work_other is not null  --人，就业情况，时间
              |
              |
            """.stripMargin).createOrReplaceTempView("t2")

        // Extract the graduation year so rows can be split into current-year
        // vs. previous-year cohorts.
        spark.sql("select name, content, date_format(edu_time,'yyyy') as df_year from t2")
          .createOrReplaceTempView("t3")

        // Per-category counts, prefixed 应届 (this year's graduates) or 往届
        // (earlier years), under the fixed display name 毕业生就业情况.
        val df = spark.sql(
            """

            select
                count(1) as num,--应届毕业生就业情况
                concat("应届",content) as content,
             '毕业生就业情况' as name
            from (
                select
                    name,
                    content,
                    df_year
                from t3
                where df_year=date_format(current_date(),'yyyy')  ---今年毕业生
                )a
            group by content

            union all
            select
                count(1) as num,--往届毕业生就业情况
                concat("往届",content) as content,
             '毕业生就业情况' as name
            from (
                select
                    name,
                    content,
                    df_year
                from t3
                where df_year != date_format(current_date(),'yyyy')  ---往年毕业生
                )a
            group by content

            """.stripMargin)

        df.show()

        // Write-side (MySQL) connection settings.
        val url4 = "jdbc:mysql://192.168.1.90:3306/test"
        val user4 = "root"
        val password4 = "bigData@123"
        val props4 = new Properties()
        props4.put("user", user4)
        props4.put("password", password4)
        props4.setProperty("useSSL", "false")
        props4.setProperty("useUnicode", "true")
        props4.setProperty("characterEncoding", "utf8")

        // Insert each aggregated row into MySQL. The connection and a single
        // PreparedStatement live per partition (created on the executor, never
        // captured from the driver) and are always closed in `finally`; values
        // are bound as parameters instead of interpolated into the SQL string,
        // so quotes in the data cannot break or inject into the statement.
        df.foreachPartition(iter => {
            classOf[com.mysql.jdbc.Driver] // force-load the legacy MySQL driver
            var connection: Connection = null
            var stmt: PreparedStatement = null
            try {
                connection = DriverManager.getConnection(url4, props4)
                stmt = connection.prepareStatement(
                    "insert into test.big_screen_result_copy1(num,content,name) values(?,?,?)")
                while (iter.hasNext) {
                    val row = iter.next() // columns: num, content, name
                    stmt.setString(1, String.valueOf(row.get(0)))
                    stmt.setString(2, row.getAs[String]("content"))
                    stmt.setString(3, row.get(2).toString)
                    stmt.addBatch()
                }
                stmt.executeBatch() // one round trip per partition, not per row
            } catch {
                // printStackTrace returns Unit — never wrap it in println,
                // which would just print "()".
                case e: Exception => e.printStackTrace()
            } finally {
                // Null-guard both handles: getConnection/prepareStatement may
                // have thrown before assignment.
                if (stmt != null) stmt.close()
                if (connection != null) connection.close()
            }
        })

        //03

        //04

        //05

       // df.write.mode(SaveMode.Append).jdbc(url,table3, props)

        spark.close()
    }
}
