package com.me.bigdata

import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Parses JSON request logs and queries them with Spark SQL.
  *
  * Expected log line format:
  * 2017/10/22-00:05:40.198 INFO [http-nio-8080-exec-8] com.cmld.api.core.controller.BaseController:143>>request info :{"checkSession":true,"checkSign":false,"dataKey":"8HkocpYLeG1LNi5m","decodeRequest":false,"encodeResponse":false,"platform":"600","salt":"123456","service":"payProduct","sign":"0d35c9b481e437719063293051cb4873","signKey":"8HkocpYLeG1LNi5mNN00","timestamp":1508601942000,"version":"1.0"}--execute time millis---3
  *
  * Usage: args(0) = input path of the raw logs, args(1) = output path for the
  * per-(service, version) request counts, written as JSON text files.
  */
object ScalaParseJson {

  /**
    * Extracts the JSON payload from a single log line: the span from the first
    * '{' to the last '}', inclusive.
    *
    * Returns None when the line has no opening brace, no closing brace, or the
    * braces are inverted — so one malformed log line is skipped instead of
    * killing the whole job with a StringIndexOutOfBoundsException (which the
    * old unconditional substring did).
    */
  def extractJson(line: String): Option[String] = {
    val start = line.indexOf('{')
    val end = line.lastIndexOf('}')
    if (start >= 0 && end > start) Some(line.substring(start, end + 1)) else None
  }

  def main(args: Array[String]): Unit = {
    // Fail fast with a clear message instead of an opaque
    // ArrayIndexOutOfBoundsException on args(0)/args(1).
    require(args.length >= 2, "usage: ScalaParseJson <inputPath> <outputPath>")

    val conf = new SparkConf().setAppName("ScalaParseJson")
    val sc = new SparkContext(conf)
    try {
      val sqlContext = new SQLContext(sc)

      val lines = sc.textFile(args(0))
      // Keep only request-info lines, then pull out the JSON fragment;
      // lines without a well-formed {...} span yield None and are dropped.
      val jsonRDD = lines.filter(_.contains("request info :")).flatMap(extractJson)

      val dataFrame = sqlContext.read.json(jsonRDD)
      // Spark 1.x SQLContext API; the view lives for the lifetime of this context.
      dataFrame.registerTempTable("request")

      sqlContext.sql("select * from request").show
      val groupData = sqlContext.sql(
        "select service,version,count(1) as count from request group by service,version order by count desc")
      groupData.toJSON.saveAsTextFile(args(1))
    } finally {
      // Release cluster resources even if the job fails partway through.
      sc.stop()
    }
  }
}
