import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions._
import org.apache.spark.sql.types._
/**
 * Structured-streaming log analysis: reads JSON log lines from a TCP socket,
 * parses them against a fixed schema, and continuously prints the distinct
 * user ids seen so far to the console.
 *
 * Usage: LogAnalysis [host] [port]
 *   host — socket source host (default "192.168.0.83", the original hard-coded value)
 *   port — socket source port (default 9999)
 */
object LogAnalysis {
  def main(args: Array[String]): Unit = {
    // Allow overriding the socket source from the command line while keeping
    // the original hard-coded endpoint as the backward-compatible default.
    val host = if (args.length > 0) args(0) else "192.168.0.83"
    val port = if (args.length > 1) args(1).toInt else 9999

    // Must be set before the session is created for the warehouse dir to take effect.
    System.setProperty("spark.sql.warehouse.dir", "d:/warehouse")
    val spark = SparkSession.builder().master("local[4]").appName("Test").getOrCreate()
    import spark.implicits._

    // Schema of one log record: every field is a non-nullable string except
    // `time`, which is a timestamp. Field names must match the JSON keys —
    // from_json resolves columns by name and yields null on a mismatch.
    // NOTE(review): "sucess" looks like a typo for "success", but renaming it
    // here would silently null that column if the producer really emits
    // "sucess"; fix it at the producer first, then rename here. TODO confirm.
    val schemaExp = StructType(
      StructField("tid", StringType, false)
        :: StructField("uid", StringType, false)
        :: StructField("srcIP", StringType, false)
        :: StructField("url", StringType, false)
        :: StructField("time", TimestampType, false)
        :: StructField("src", StringType, false)
        :: StructField("module", StringType, false)
        :: StructField("function", StringType, false)
        :: StructField("operator", StringType, false)
        :: StructField("referer", StringType, false)
        :: StructField("sucess", StringType, false)
        :: StructField("error", StringType, false)
        :: StructField("param", StringType, false)
        :: StructField("reactTime", StringType, false)
        :: Nil)

    // Socket source delivers one value per line; cast the raw bytes to a
    // string, parse it as JSON with the schema above, then flatten the
    // resulting struct into top-level columns.
    val line = spark.readStream.format("socket").option("host", host).option("port", port).load()
      .selectExpr("cast (value as string) as json")
      .select(from_json($"json", schema = schemaExp).as("data"))
      .select("data.*")
    line.createOrReplaceTempView("t")

    // Earlier experiments, kept for reference.
    // NOTE(review): these use date_format(time,'YM') — in the Java date
    // pattern syntax 'Y' is the WEEK-based year; the intended pattern is
    // almost certainly 'yyyyMM' (and '20188' looks like a malformed literal).
    //    val r = line.where("date_format(time,'YM')=20188").groupBy("uid").count()
    //    val r = spark.sql("select count(tmp.uid) from (select uid,date_format(time,'YMd') as date from t where date_format(time,'YM')='20188' group by uid,date_format(time,'YMd')) tmp group by uid")
    //    val r = spark.sql("select count(distinct(date_format(time,'YM')) from t where date_format(time,'YM')='20188' group by uid having count(distinct(date_format(time,'YM'))>1)")

    // Never reassigned, so val (not var).
    val r = spark.sql("select distinct(uid) from t")

    // Append mode: each micro-batch prints only newly-seen distinct uids.
    // awaitTermination blocks the driver until the query is stopped or fails.
    r.writeStream.outputMode("append").format("console").option("truncate", "false").start().awaitTermination()

    //    newT.createOrReplaceTempView("log")
    //    spark.sql("")
    //    val r = newT.select($"uid",$"time").where("date_format(time,'%Y%m')=date_format(now(),'%Y%m')")
    //    r.writeStream.outputMode("append").format("console").option("truncate","false").start().awaitTermination()
    //    spark.sql("select * from t where date_format(time,'%Y%m')=date_format(now(),'%Y%m')").writeStream.outputMode("append").format("console").option("truncate","false").start().awaitTermination()

  }
}
