package com.sdehualu.spark

import com.sdehualu.utils.{HDFSUtils, NetCDFInternalUtils, NetCDFUtils}
import org.apache.spark.sql.types.{StringType, StructField, StructType}

import scala.collection.mutable.ArrayBuffer

/**
 * @Author: 吴敬超
 * @Date: 2021/7/20 13:39
 */
/**
 * Scratch/demo entry point: exercises two ways of building a Spark SQL schema,
 * then checks an HDFS-hosted NetCDF file for existence and for a named variable.
 *
 * NOTE(review): `System.exit(1)` on a missing input file is intentionally left
 * disabled below, matching the original exploratory behavior — confirm before
 * promoting this to production code.
 */
object test {
  def main(args: Array[String]): Unit = {

    // Schema built from a plain Array of fields.
    val schemaFields = Array(
      StructField("time", StringType, nullable = true),
      StructField("lat", StringType, nullable = true),
      StructField("lon", StringType, nullable = true),
      StructField("QCHANR", StringType, nullable = true)
    )

    // Assemble the schema and show it, plus the first field on its own.
    val schema = StructType(schemaFields)
    println(schema)
    println(schemaFields(0))

    // Same idea, but built from a mutable buffer instead of a fixed array.
    val bufferedFields = ArrayBuffer(
      StructField("name", StringType, nullable = true),
      StructField("age", StringType, nullable = true)
    )
    val bufferedSchema = StructType(bufferedFields)
    println(bufferedSchema)

    // HDFS locations of the NetCDF input (cluster URL + path within it).
    val inputfileurl = "hdfs://192.168.22.9:8020"
    println(s"inputfileurl is:$inputfileurl")

    val inputfile = "/par0/data.nc"
    println(s"inputfile is:$inputfile")

    // Existence check; the original code deliberately does NOT exit on failure.
    val hdfs = new HDFSUtils()
    if (!hdfs.FileExist(inputfileurl, inputfile)) {
      println("输入文件不存在")
      //      System.exit(1)
    }

    println("*********************")

    // Load the NetCDF dataset from HDFS (4096-byte buffer, no caching flag).
    val ncds = NetCDFUtils.loadDFSNetCDFDataSet(inputfileurl, inputfile, 4096, false)

    // Probe for a variable by name; "varname" is a placeholder identifier.
    val internalUtils = new NetCDFInternalUtils()
    val hasVariable = internalUtils.Varflag(ncds, "varname")
    if (!hasVariable) {
      println("此nc文件中不存在变量" + "varname")
    }

    println("*********************")
  }
}
