package com.sdehualu.utils

import org.apache.spark.sql.types.{StructType, _}
import org.apache.spark.sql.{Row, SparkSession}
import ucar.ma2.ArrayFloat

object sparkhdfsnclistrow {

  /**
   * Reads selected variables (time / lat / lon / QCHANR) from a NetCDF file
   * stored on HDFS, one slice per RDD element, flattens the per-slice rows
   * into a single RDD[Row], builds a DataFrame with a string schema and
   * writes it out as one CSV file with a header.
   */
  def main(args: Array[String]): Unit = {
    // Create the SparkSession and set the application name.
    val spark = SparkSession
      .builder()
      .appName("SparkSessionT")
      .master("local[6]")
      .getOrCreate()
    val sc = spark.sparkContext

    // Each element serves as an index into the lon / QCHANR arrays,
    // spread over 6 partitions so slices are read in parallel.
    val data = List(1, 2, 3, 4, 5, 6)
    val nameRDD = sc.parallelize(data, 6)

    // For each lon index, open the NetCDF dataset and produce 10 rows
    // (one per lat index 1..10).
    val r4 = nameRDD.map(lonIdx => {
      val ncf = NetCDFUtils.loadDFSNetCDFDataSet("hdfs://192.168.22.9:8020",
        "/par0/B.E.13.B1850C5.ne120_t12.sehires38.003.sunway_02.rtm.h0.0022-02.nc",
        4000, false)
      try {
        // Hoist the reads out of the loop: Variable.read pulls the whole
        // array from the file, so the original code re-read every array on
        // each of the 10 iterations.
        val timeArr = ncf.findVariable("time").read.asInstanceOf[ArrayFloat.D1]
        val latArr = ncf.findVariable("lat").read.asInstanceOf[ArrayFloat.D1]
        val lonArr = ncf.findVariable("lon").read.asInstanceOf[ArrayFloat.D1]
        val uArr = ncf.findVariable("QCHANR").read.asInstanceOf[ArrayFloat.D3]

        // Build List[Row] directly instead of appending with :+ (O(n) per
        // append) to a mutable var.
        (1 to 10).map { latIdx =>
          Row(
            timeArr.get(0).toString,
            latArr.get(latIdx).toString,
            lonArr.get(lonIdx).toString,
            uArr.get(0, latIdx, lonIdx).toString)
        }.toList
      } finally {
        // Close the NetCDF handle so each task does not leak a file handle
        // (the original never closed it).
        ncf.close()
      }
    })

    // Flatten each List[Row] into a single RDD[Row].
    val r5 = r4.flatMap(identity)

    r5.foreach(println)

    // Schema: all four columns as nullable strings.
    val structFields = Array(StructField("time", StringType, true),
      StructField("lat", StringType, true),
      StructField("lon", StringType, true),
      StructField("QCHANR", StringType, true))

    // Create the schema.
    val structType = StructType(structFields)

    // Create the DataFrame from the row RDD.
    val df = spark.createDataFrame(r5, structType)
    // Show the data.
    df.show
    // Write as a single CSV file.
    df.repartition(1).write
      .option("header", "true")
      .csv("./nccsvfile")

    // To store on HDFS instead:
    //   .csv("hdfs://192.168.22.9:8020/par0/nccsv")

    // Stop Spark (also stops the SparkContext) and end the job.
    spark.stop()
  }
}