package com.ipinyou.offscore

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.slf4j.LoggerFactory
import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.mapreduce.TableInputFormat
import org.apache.hadoop.hbase.util.Bytes
import scala.collection.mutable.ArrayBuffer
import org.apache.spark.sql.SQLContext
import com.fasterxml.jackson.module.scala.experimental.ScalaObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.databind.DeserializationFeature


/**
 * Offline scoring job.
 *
 * Reads a tab-separated behavior log, extracts per-user (pyid) interest keys
 * from three JSON columns (top sites, top apps, searches), looks each key up
 * in a broadcast rule table, and accumulates a score per (pyid, category),
 * writing the result as TSV lines: `pyid \t category \t score`.
 */
object OffScoring {

  val a = 1.0
  val b = 2.0

  // Run mode: "0" = test, "1" = production.
  var flag = "0"

  // Local defaults for development; overridden by command-line args (see main).
  var output = "file:///Users/miaoyujia/tmp/offscore"
  var all_rule = "file:///Users/miaoyujia/git/mobile-userprofile-src/offline_score/file/off_rule"
  // Production input example:
  // hdfs://192.168.146.68/user/root/flume/3th_logs/asiainfo/dxcloud_ub/2016/11/02/with_pyid
  var input = "file:///Users/miaoyujia/git/mobile-userprofile-src/offline_score/file/data_t"

  // Logger for this job.
  val log = LoggerFactory.getLogger(this.getClass)

  /**
   * Extracts the keys of a flat one-level JSON object given as a raw string
   * (e.g. `{"appid1":3,"appid2":7}`) by stripping quotes/braces and splitting,
   * and prefixes each key with the given category.
   *
   * NOTE(review): this is string surgery, not real JSON parsing — it assumes
   * no nested objects and no commas/colons inside keys or values.
   *
   * @param jsonString raw JSON object text
   * @param cate       category prefix, e.g. "topapp"
   * @return keys rendered as "cate:key"
   */
  def json2list(jsonString: String, cate: String): Array[String] = {
    val stripped = jsonString.replace("\"", "").replace("{", "").replace("}", "").trim()
    // `val` (was `var` in the original but never reassigned).
    val pairs = stripped.split(",", -1)
    for (pair <- pairs) yield cate + ":" + pair.split(":", -1)(0)
  }

  def main(args: Array[String]): Unit = {

    // BUG FIX: the original checked only `args.length > 0` and then read
    // args(1)..args(3), so running with 1-3 arguments crashed with
    // ArrayIndexOutOfBoundsException. Require all four or none.
    if (args.length >= 4) {
      flag = args(0)
      output = args(1)
      all_rule = args(2)
      input = args(3)
    } else if (args.nonEmpty) {
      log.error("Expected 4 arguments: <flag> <output> <all_rule> <input>; got {}", args.length)
      sys.exit(1)
    }

    val config = new SparkConf().setAppName("mobile.offscore")
    val sc = new SparkContext(config)

    try {
      val dtest = sc.textFile(input)

      // Offline rule table, TSV. After grouping:
      //   key   -> "cate:ruleKey" (e.g. "topapp:appid")
      //   value -> "cate:score" entries joined with "#" (e.g. "cate:1.0#cate2:2.0")
      val ruRdd = sc.textFile(all_rule)
      val ruMap = ruRdd.map { line =>
        val arr = line.split("\t", -1)
        (List(arr(1), arr(3)).mkString(":"), List(arr(0), arr(4)).mkString(":"))
      }.groupByKey().map(kv => (kv._1, kv._2.mkString("#"))).collectAsMap()
      // Rule table is small enough to broadcast to every executor.
      val broMap = sc.broadcast(ruMap)

      val reRdd = dtest.flatMap { record =>
        val arr = record.split("\t", -1)
        val pyid = arr(11)

        // Records without a resolved pyid carry the literal string "null".
        if (pyid == "null") {
          List()
        } else {
          val site_json = arr(3)
          val app_json = arr(4)
          val search_json = arr(5)

          // Each JSON column may itself be the literal "null"; skip those.
          val sitelist =
            if (site_json != "null") json2list(site_json, "topsite") else Array.empty[String]
          val applist =
            if (app_json != "null") json2list(app_json, "topapp") else Array.empty[String]
          val searchlist =
            if (search_json != "null") json2list(search_json, "search") else Array.empty[String]

          // One (pyid, rule) pair per extracted interest key.
          for (rule <- sitelist.union(applist).union(searchlist)) yield (pyid, rule)
        }
      }.flatMap {
        case (pyid, rule) =>
          // Join against the broadcast rule table; unmatched rules are dropped.
          broMap.value.get(rule) match {
            case Some(cate_scores) =>
              // IDIOM FIX: was `m.get(rule).get` after a `contains` check;
              // a single Option match avoids the double lookup and the `.get`.
              for (entry <- cate_scores.split("#", -1)) yield {
                // Hoist the split — the original split each entry twice.
                val kv = entry.split(":")
                (List(pyid, kv(0)).mkString("\t"), kv(1).toFloat)
              }
            case None =>
              List()
          }
      }.reduceByKey(_ + _).map { x =>
        // Output line: pyid \t category \t summed score
        List(x._1, x._2).mkString("\t")
      }.saveAsTextFile(output)
    } finally {
      // BUG FIX: the original never stopped the SparkContext.
      sc.stop()
    }
  }
}

