package com.gosun


import org.apache.log4j.{Level, Logger}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema
import org.apache.spark.sql.types.{ StringType, StructField, StructType}
import org.apache.spark.sql.{DataFrame, Row}
import org.apache.spark.sql.SparkSession
import scala.collection.mutable.ArrayBuffer

object yy21_yc_place_information {
  // Uncomment to silence Spark's own logging noise.
  //Logger.getLogger("org").setLevel(Level.ERROR)

  /**
   * Batch job: reads the `yy21_yc_place_information` index from Elasticsearch,
   * masks the two telephone columns via `DataEncry.changPhone`, appends an
   * AES-encrypted copy of each raw phone number (`DataEncry.changAES`), and
   * writes the whole result as a single JSON array to the local filesystem
   * through `getResult.getData`.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    val sparkSession = SparkSession
      .builder()
      .config("spark.network.timeout", "1200")
      //.master("local[*]")
      .appName("SQLContextApp")
      .getOrCreate()

    // elasticsearch-hadoop connector options; sliced partitions are disabled
    // so each scroll maps to exactly one Spark partition.
    val options = Map(
      "es.nodes.wan.only" -> "true",
      "es.nodes" -> "10.178.77.8",
      "es.port" -> "9200",
      "es.read.field.as.array.include" -> "arr1, arr2",
      "es.scroll.size" -> "10000",
      "es.input.use.sliced.partitions" -> "false"
    )
    val index = "yy21_yc_place_information"
    val frame: DataFrame = sparkSession
      .read
      .format("es")
      .options(options)
      .load(index)
    //frame.show()

    // Output schema: the source columns (phones masked in place) plus two
    // extra "_aes" columns holding AES-encrypted copies of the raw phones.
    val resSchema = StructType(
      List(
        StructField("id", StringType, true),
        StructField("type", StringType, true),
        StructField("Company", StringType, true),
        StructField("Company_id", StringType, true),
        StructField("legal_person", StringType, true),
        StructField("legal_telephone", StringType, true),
        StructField("charge_name", StringType, true),
        StructField("charge_telephone", StringType, true),
        StructField("address", StringType, true),
        StructField("Business_license", StringType, true),
        StructField("Licensing_information", StringType, true),
        StructField("remarks", StringType, true),
        StructField("legal_telephone_aes", StringType, true),
        StructField("charge_telephone_aes", StringType, true)
      )
    )

    // Transform each row lazily with iter.map instead of materializing the
    // whole partition into an ArrayBuffer (the original buffered eagerly,
    // which risks OOM on large partitions). createDataFrame applies
    // resSchema, so a plain Row is enough — no need for the internal
    // Catalyst class GenericRowWithSchema.
    val value: RDD[Row] = frame.rdd.mapPartitions(iter =>
      iter.map { row =>
        // Raw phone values are read once and reused for both the masked and
        // the AES-encrypted output columns.
        val rawLegalPhone = row.getAs[String]("legal_telephone")
        val rawChargePhone = row.getAs[String]("charge_telephone")
        Row(
          row.getAs[String]("id"),
          row.getAs[String]("type"),
          row.getAs[String]("Company"),
          row.getAs[String]("Company_id"),
          row.getAs[String]("legal_person"),
          DataEncry.changPhone(rawLegalPhone),
          row.getAs[String]("charge_name"),
          DataEncry.changPhone(rawChargePhone),
          row.getAs[String]("address"),
          row.getAs[String]("Business_license"),
          row.getAs[String]("Licensing_information"),
          row.getAs[String]("remarks"),
          DataEncry.changAES(rawLegalPhone),
          DataEncry.changAES(rawChargePhone)
        )
      }
    )
    val dataFrame: DataFrame = sparkSession.createDataFrame(value, resSchema)
    //val path = "C:\\Users\\Raichard\\Desktop\\宜春\\JSON\\"
    //val abspath = path + index
    // BUG FIX: "file://tmp/..." makes "tmp" the URI *authority* (host), not a
    // directory — Hadoop's Path would resolve it to "/yichun/...". A local
    // absolute path needs three slashes: file:///tmp/... .
    // NOTE(review): the filename "zz21_vw_baseinfo.json" does not match this
    // job's index ("yy21_yc_place_information") — looks like a copy-paste
    // from a sibling job; confirm the intended output name.
    val path = "file:///tmp/yichun/zz21_vw_baseinfo.json"
    // collect() pulls every row to the driver to build one JSON array string;
    // acceptable only while the index comfortably fits in driver memory.
    val str1: String = dataFrame.toJSON.collect.mkString("[", ",", "]")
    getResult.getData(str1, path)

    /*dataFrame
      .write
      .mode(SaveMode.Overwrite)
      .format("json")
      .save(abspath)*/
    println("输出完成！")
    sparkSession.close()

  }

}
