package com.fanli.bigdata.mytest

import com.fanli.bigdata.db.StateRedisConnections
import kafka.serializer.StringDecoder
import org.apache.spark._
import org.apache.spark.rdd.RDD

import org.apache.log4j.{Level, Logger}
import java.sql.{Connection, DriverManager, ResultSet}

import org.apache.spark.streaming._
import org.apache.spark.streaming.kafka.KafkaUtils
import org.apache.spark.streaming.dstream.DStream

/**
 * Local-mode demo driver: parses a go-shop node log file into
 * (RealtimeGoShopPUvKey, RealtimeGoShopPUvValue) pairs and hands the
 * collected result to GoshopFun.goshopState.
 *
 * Sample input line (format the parser expects — kept from the original
 * commented-out test fixture):
 *   192.168.2.206 data={"data1":6444967106,...,"user_id":"23995644","ip":"112.14.84.88"} 1532 2016-11-02 23:00:31
 */
object GoshopDemo {
  // Silence Spark's verbose INFO/WARN output for local debugging runs.
  Logger.getLogger("org").setLevel(Level.ERROR)

  // NOTE(review): app name "MySpakDemo1" looks like a typo carried over from
  // another demo; kept unchanged since it is only a display label in the UI.
  val conf = new SparkConf().setAppName("MySpakDemo1").setMaster("local[*]")
  val sc = new SparkContext(conf)

  /** Entry point: runs the batch variant against the local sample file. */
  def main(args: Array[String]): Unit = {
    goshopBatchMain()
  }

  /** Batch run: parse the local sample log and push results to goshopState. */
  def goshopBatchMain(): Unit = parseAndProcessLocalLog()

  /**
   * NOTE(review): despite the name ("Steaming" is presumably a typo for
   * "Streaming"), this method was byte-identical to goshopBatchMain and does
   * no streaming — it re-reads the same local file. The method name and
   * behavior are preserved for source compatibility; a real streaming
   * implementation (StreamingContext/KafkaUtils, per the currently unused
   * imports at the top of the file) remains TODO.
   */
  def goshoSteamingMain(): Unit = parseAndProcessLocalLog()

  /**
   * Shared implementation for both public entry points (they were duplicated
   * copy-paste): read the hard-coded local log file, parse each partition via
   * GoshopFun.MatchLogBatchFunc, collect to the driver, print the pairs, pass
   * them to GoshopFun.goshopState, then stop the SparkContext.
   *
   * NOTE(review): collect() pulls everything to the driver — fine for a demo
   * file, unsafe for production-sized input. The Windows-only path is also a
   * demo artifact; confirm before reuse.
   */
  private def parseAndProcessLocalLog(): Unit = {
    val inputRDD = sc.textFile("file:///D:/nodelog.txt")
    val parsed: RDD[(RealtimeGoShopPUvKey, RealtimeGoShopPUvValue)] =
      inputRDD.mapPartitions(GoshopFun.MatchLogBatchFunc(_))
    val res: Array[(RealtimeGoShopPUvKey, RealtimeGoShopPUvValue)] = parsed.collect()
    println(res.mkString(","))
    GoshopFun.goshopState(res)
    // Stop the context after the run; calling either entry point twice in one
    // JVM will fail because sc is a single object-level context.
    sc.stop()
  }
}
