package com.data

import com.gosun.{DataEncry, getResult}
import java.util.{Date, Properties}
import org.apache.log4j.{Level, Logger}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema
import org.apache.spark.sql.types.{StringType, StructField, StructType}
import org.apache.spark.sql.{DataFrame, Row, SaveMode, SparkSession}

import scala.collection.JavaConversions.asScalaBuffer
import scala.collection.mutable.ArrayBuffer

object zz21_vw_baseinfo {
  // Silence Spark's verbose INFO/WARN logging; only errors are shown.
  Logger.getLogger("org").setLevel(Level.ERROR)

  /**
   * Reads numbered JSON files `ZZ_VW_BASEINFO_<i>.json`, projects the eight
   * columns declared in `resSchema`, and appends each file's rows to the
   * MySQL table `VW_BASEINFO` over JDBC.
   *
   * @param args optional overrides (backward compatible — defaults match the
   *             original hard-coded values):
   *             args(0) = path prefix of the JSON files,
   *             args(1) = number of files to process.
   */
  def main(args: Array[String]): Unit = {
    val sparkSession = SparkSession
      .builder()
      .config("spark.network.timeout", "1200")
      .config("spark.kryoserializer.buffer.max", "2000")
      .config("spark.executor.memory", "6g")
      .master("local[*]")
      .appName("SQLContextApp")
      .getOrCreate()

    // Output schema: exactly the eight columns written to MySQL.
    val resSchema = StructType(
      List(
        StructField("CONTACTWAY", StringType, true), // contact info
        StructField("PLACEOFORIGIN", StringType, true), // place of origin
        StructField("CAREERTEXT", StringType, true), // occupation
        StructField("MOBILENUMBER", StringType, true), // mobile number
        StructField("TELEPHONE", StringType, true), // landline number
        StructField("IDCARDNO", StringType, true), // ID card number
        StructField("NAME", StringType, true), // name
        StructField("NATIVEPLACEADDRESS", StringType, true) // registered residence address
      )
    )
    // Derive the projected column list from the schema so row arity and
    // schema arity can never drift apart again.
    val fieldNames: Array[String] = resSchema.fieldNames

    val dirpath =
      if (args.length > 0) args(0)
      else "C:\\Users\\Raichard\\Desktop\\宜春\\data1\\ZZ_VW_BASEINFO\\20200917\\ZZ_VW_BASEINFO_"
    val fileCount = if (args.length > 1) args(1).toInt else 1167

    // Loop-invariant JDBC configuration hoisted out of the per-file loop.
    // BUG FIX: the original URL contained "serverTimezone = GMT" (with
    // spaces around '='), which MySQL Connector/J does not parse as the
    // serverTimezone property.
    val url = "jdbc:mysql://localhost:3306/mydb?useUnicode=true&characterEncoding=utf-8&useSSL=false&serverTimezone=GMT"
    val prop: Properties = new Properties()
    prop.setProperty("user", "root")
    prop.setProperty("password", "123456")
    prop.setProperty("characterEncoding", "utf8")
    prop.setProperty("useSSL", "false")
    prop.setProperty("useUnicode", "true")

    for (i <- 1 to fileCount) { // default: 1167 files
      val abspath = dirpath + i + ".json"
      val frame: DataFrame = sparkSession
        .read
        .json(abspath)

      // BUG FIX: the original built 9-value rows (it also read "GENDER")
      // against the 8-field resSchema, so every row's arity disagreed with
      // its declared schema; getAs("GENDER") would also throw for files
      // lacking that field. Project exactly the schema's columns, lazily
      // (no intermediate ArrayBuffer per partition).
      val value: RDD[Row] = frame.rdd.mapPartitions { iter =>
        iter.map { row =>
          val cells: Array[Any] = fieldNames.map(n => row.getAs[String](n): Any)
          new GenericRowWithSchema(cells, resSchema): Row
        }
      }

      // coalesce(1): write each file's rows from a single partition so the
      // JDBC append happens over one connection.
      val dataFrame: DataFrame = sparkSession.createDataFrame(value.coalesce(1), resSchema)
      dataFrame.write.mode(SaveMode.Append)
        .jdbc(url, "VW_BASEINFO", prop)

      println(abspath + "输出完成！")
    }
    sparkSession.close()
    println("全部完成")
  }

}
