package com.gosun

import org.apache.log4j.{Level, Logger}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema
import org.apache.spark.sql.types.{StringType, StructField, StructType}
import org.apache.spark.sql.{DataFrame, Row, SaveMode, SparkSession}

import scala.collection.mutable.ArrayBuffer

object yy21_cl_zxtj {
  // Silence Spark/Hadoop chatter; runs once when the object is first loaded.
  Logger.getLogger("org").setLevel(Level.ERROR)

  /**
   * De-identification job for the "yy21_cl_zxtj" Elasticsearch index.
   *
   * Reads every document from ES, masks the directly identifying columns
   * (name, birth year-month, home address, phone, ID-card numbers) with the
   * project's `DataEncry` helpers, appends AES-encrypted copies of
   * lxdh / sfzh / cjzh, and writes the result as a single local JSON file.
   *
   * @param args unused
   */
  def main(args: Array[String]): Unit = {
    val sparkSession = SparkSession
      .builder()
      .config("spark.network.timeout", "1200")
      .master("local[*]")
      .appName("SQLContextApp")
      .getOrCreate()

    // elasticsearch-hadoop connector settings; scroll 10k docs per fetch.
    val options = Map(
      "es.nodes.wan.only" -> "true",
      "es.nodes" -> "10.82.121.72",
      "es.port" -> "9200",
      "es.read.field.as.array.include" -> "arr1, arr2",
      "es.scroll.size" -> "10000",
      "es.input.use.sliced.partitions" -> "false"
    )
    val index = "yy21_cl_zxtj"
    val frame: DataFrame = sparkSession
      .read
      .format("es")
      .options(options)
      .load(index)
    //frame.show()

    // Output columns, in order. All are nullable strings; the trailing
    // *_aes columns hold AES-encrypted copies of lxdh / sfzh / cjzh.
    val columns = Seq(
      "xm", "csny", "xb", "mz", "jtzz", "lxdh", "sfzh", "yb", "sfnyh",
      "cjlb", "cjzh", "gxlb", "sqje", "lqgxqc", "xxlxdh", "sqly", "sqsj",
      "zjffr", "yxzzh", "xxnf", "lxdh_aes", "sfzh_aes", "cjzh_aes"
    )
    val resSchema = StructType(columns.map(StructField(_, StringType, nullable = true)))

    // Lazily rewrite each row: mask/encrypt the sensitive fields, pass the
    // rest through unchanged. Mapping the iterator directly (instead of the
    // original ArrayBuffer + while loop) avoids materializing an entire
    // partition in memory, which matters with es.scroll.size = 10000.
    // Plain Row(...) is used rather than the internal Catalyst
    // GenericRowWithSchema: createDataFrame applies resSchema anyway.
    val value: RDD[Row] = frame.rdd.mapPartitions { iter =>
      iter.map { row =>
        def col(name: String): String = row.getAs[String](name)
        Row(
          DataEncry.changName(col("xm")),
          // NOTE(review): csny (birth year-month) is masked with the phone
          // masker — looks suspicious; confirm this is intentional.
          DataEncry.changPhone(col("csny")),
          col("xb"),
          col("mz"),
          DataEncry.changAddress(col("jtzz")),
          DataEncry.changPhone(col("lxdh")),
          DataEncry.changIDcard(col("sfzh")),
          col("yb"),
          col("sfnyh"),
          col("cjlb"),
          DataEncry.changIDcard(col("cjzh")),
          col("gxlb"),
          col("sqje"),
          col("lqgxqc"),
          col("xxlxdh"),
          col("sqly"),
          col("sqsj"),
          col("zjffr"),
          col("yxzzh"),
          col("xxnf"),
          DataEncry.changAES(col("lxdh")),
          DataEncry.changAES(col("sfzh")),
          DataEncry.changAES(col("cjzh"))
        )
      }
    }

    // coalesce(1) so the JSON output lands in a single part file.
    val dataFrame: DataFrame = sparkSession.createDataFrame(value.coalesce(1), resSchema)
    val path = "C:\\Users\\Raichard\\Desktop\\宜春\\JSON\\"
    val abspath = path + index
    dataFrame.show(false)
    dataFrame
      .write
      .mode(SaveMode.Overwrite)
      .format("json")
      .save(abspath)
    println("输出完成！")
    sparkSession.close()

  }

}
