package com.sugon.binary

import java.io.{BufferedReader, InputStreamReader}
import java.util.zip.{ZipEntry, ZipInputStream}

import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkContext
import org.apache.spark.input.PortableDataStream
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.types.{StringType, StructField, StructType}
import org.apache.spark.sql.{DataFrame, Row, SaveMode, SparkSession}

import scala.collection.mutable.ArrayBuffer

object Wnsj {

  /**
   * Entry point: reads zip archives of comma-separated records from a
   * seaweedfs bucket, drops header rows, and appends the result as ORC
   * to the `ysk_z_devices_t` warehouse table path.
   *
   * All 22 columns are ingested as nullable strings; `split(",", -1)`
   * keeps trailing empty fields so the column count is preserved.
   */
  def main(args: Array[String]): Unit = {
    Logger.getLogger("org").setLevel(Level.WARN)
    val spark: SparkSession = SparkSession.builder()
      .appName("sugon_my_spark")
      .getOrCreate()
    val sc: SparkContext = spark.sparkContext

    try {
      // Each archive may hold several entries; each entry is read line by line.
      // Iterator (not Stream) is used deliberately: Stream memoizes every
      // element it yields, which would pin all lines of a large archive in
      // executor memory. The Iterator is consumed lazily by the Spark task.
      // NOTE(review): zis is never closed explicitly — presumably the
      // underlying PortableDataStream is released when the task ends; confirm.
      val lines: RDD[String] = sc
        .binaryFiles("seaweedfs://92.129.3.2:30888/buckets/tcga/test/")
        .flatMap { case (_: String, content: PortableDataStream) =>
          val zis = new ZipInputStream(content.open)
          Iterator
            .continually(zis.getNextEntry)
            .takeWhile(_ != null)
            .flatMap { _ =>
              // The reader wraps zis directly so it stops at the current
              // entry's boundary; getNextEntry then advances the archive.
              val reader = new BufferedReader(new InputStreamReader(zis))
              Iterator.continually(reader.readLine()).takeWhile(_ != null)
            }
        }

      // limit = -1 keeps trailing empty fields; Row.fromSeq consumes the
      // split result directly (no per-field append loop needed).
      // The length guard drops malformed lines with fewer than two fields
      // instead of throwing; r.get(1) != "mac" skips embedded header rows.
      val dataRows: RDD[Row] = lines
        .map(line => Row.fromSeq(line.split(",", -1).toSeq))
        .filter(r => r.length > 1 && r.get(1) != "mac")

      // Flat, all-string schema matching the 22 CSV columns.
      val schema = StructType(Seq(
        StructField("Id", StringType, nullable = true),
        StructField("mac", StringType, nullable = true),
        StructField("brand", StringType, nullable = true),
        StructField("cache_ssid", StringType, nullable = true),
        StructField("capture_time", StringType, nullable = true),
        StructField("terminal_fieldstrength", StringType, nullable = true),
        StructField("identification_type", StringType, nullable = true),
        StructField("certificate_code", StringType, nullable = true),
        StructField("ssid_position", StringType, nullable = true),
        StructField("access_ap_mac", StringType, nullable = true),
        StructField("access_ap_channel", StringType, nullable = true),
        StructField("access_ap_encryption_type", StringType, nullable = true),
        StructField("x_coordinate", StringType, nullable = true),
        StructField("y_coordinate", StringType, nullable = true),
        StructField("netbar_wacode", StringType, nullable = true),
        StructField("collection_equipment_id", StringType, nullable = true),
        StructField("collection_equipment_longitude", StringType, nullable = true),
        StructField("collection_equipment_latitude", StringType, nullable = true),
        StructField("areaid", StringType, nullable = true),
        StructField("memo1", StringType, nullable = true),
        StructField("memo2", StringType, nullable = true),
        StructField("memo3", StringType, nullable = true)
      ))

      val df: DataFrame = spark.createDataFrame(dataRows, schema)
      df.write.format("orc").mode(SaveMode.Append).save("seaweedfs://92.129.3.2:30888/buckets/warehouse-xuanyuan-hive-metastore/ysk.db/ysk_z_devices_t/")
    } finally {
      // Release the driver/executor resources even if the job fails.
      spark.stop()
    }
  }
}
