package com.lianzt

import net.minidev.json.{JSONArray, JSONObject, JSONValue}
import org.apache.spark.sql.{Row, SparkSession, functions}
import org.apache.spark.sql.types.{StringType, StructField, StructType}

object TccglCLfl {

  /**
   * Spark batch job: scans the log files under `<hdfs>/log/` for
   * "['log-request-time']" lines that carry code "S50008", parses the JSON
   * payload embedded in each matching line, and writes the distinct
   * (kcbh, clfl) pairs, ordered by kcbh, to `<hdfs>/result` as text.
   *
   * Output rows are `Row.toString` renderings (e.g. "[kcbh,clfl]"), as
   * produced by `RDD.saveAsTextFile`. The job fails if the result path
   * already exists (standard Hadoop output-committer behavior).
   */
  def main(args: Array[String]): Unit = {

    val spark = SparkSession
      .builder()
      .appName("count log file")
      .getOrCreate()

    // Base HDFS path for both input logs and the result directory.
    // Never reassigned, so `val` (was `var`).
    //    val hdfs = "hdfs://FatServer1:9000"
    val hdfs = "hdfs://hacluster/user/sjtp/tccgl"

    val sc = spark.sparkContext
    val table = sc.textFile(s"$hdfs/log/*")
      // Keep only request-time log lines mentioning code S50008.
      .filter(x => x.contains("['log-request-time']") && x.contains("S50008"))
      // Each matching line carries a JSON object starting at the first '{'.
      // Guard against lines with no '{' and against JSONValue.parse
      // returning null / a non-object for malformed JSON: such lines are
      // dropped instead of crashing the job with an NPE or
      // StringIndexOutOfBoundsException.
      .flatMap { line =>
        val brace = line.indexOf('{')
        if (brace < 0) None
        else JSONValue.parse(line.substring(brace)) match {
          case obj: JSONObject => Some(obj)
          case _               => None
        }
      }
      .map { obj =>
        // Missing keys yield null, which the nullable StringType columns
        // below tolerate. NOTE(review): assumes kcbh/clfl are JSON strings;
        // a numeric value here would throw ClassCastException — confirm
        // against the log format.
        Row(obj.get("kcbh").asInstanceOf[String], obj.get("clfl").asInstanceOf[String])
      }

    // Two nullable string columns: course id (kcbh) and category (clfl).
    val schema =
      StructType(
        StructField("kcbh", StringType, true) ::
          StructField("clfl", StringType, true) :: Nil)

    spark.createDataFrame(table, schema).createOrReplaceTempView("clfl_tmp")
    val result = spark.sql("select distinct kcbh,clfl from clfl_tmp order by kcbh")

    result.rdd.saveAsTextFile(s"$hdfs/result")

    spark.stop()
  }
}
