package yhb.fpm

import java.math.BigInteger

import breeze.numerics._
import org.apache.spark.rdd.RDD

/**
  * Created by root on 15-12-16.
  */
object AprioriAlgorithm {
  System.setProperty("spark.serializer", "org.apache.spark.serializer.KryoSerializer")

  // Minimum support threshold, as a fraction of the total transaction count.
  private var minSupport = 0.1
  // Minimum confidence threshold applied when filtering association rules.
  private var minConfidence = 0.6
  // Maximum size of a frequent itemset; a negative value means "no limit".
  private var maxLen = 10

  /** Sets the minimum support threshold; returns this object for call chaining. */
  def setMinSupport(minSupport:Double) = {
    this.minSupport = minSupport
    this
  }

  /** Sets the minimum confidence threshold; returns this object for call chaining. */
  def setMinConfidence(minConfidence:Double) = {
    this.minConfidence = minConfidence
    this
  }

  /** Sets the maximum frequent-itemset length; returns this object for call chaining. */
  def setMaxLen(maxLen:Int) = {
    this.maxLen = maxLen
    this
  }

  /**
    * Computes all frequent itemsets of the transaction set with the Apriori algorithm.
    *
    * @param srcItemSet transactions, one item set per record
    * @return RDD of (frequent itemset, absolute support count)
    */
  def getFrequentSets(srcItemSet:RDD[Set[String]]) = {

    /**
      * Recursively grows frequent (k+1)-itemsets from the frequent k-itemsets.
      *
      * @param srcItemSet   the transaction set
      * @param minSupInt    minimum absolute support count
      * @param itemK        the frequent k-itemsets
      * @param k            current itemset size
      * @param frequentSets all frequent itemsets found so far
      * @param maxLen       maximum itemset size; negative means unbounded
      * @return every frequent itemset with its support count
      */
    @scala.annotation.tailrec
    def recursionFrequentSets(srcItemSet:RDD[Set[String]],minSupInt:Int,itemK:RDD[Set[String]],k:Int,
                              frequentSets:RDD[(Set[String],Int)],maxLen:Int= -1):RDD[(Set[String],Int)] = {

      val itemMap = itemK.map((1,_))

      // Candidate (k+1)-itemsets: union every pair of frequent k-itemsets and
      // keep only unions of exactly k+1 items. collect() pulls the (small)
      // candidate list to the driver for the membership test below; if it ever
      // exhausts driver memory, replace the flatMap with an RDD join instead.
      val candidateItem = itemMap.join(itemMap).map(_._2).map(x=> x._1 union x._2).filter(_.size == k+1)
        .distinct().collect()

      // Count each candidate's occurrences across the transactions and keep
      // those that meet the minimum support count.
      val itemAddOne = srcItemSet.flatMap { transaction =>
        candidateItem.filter(_.subsetOf(transaction)).map((_, 1))
      }.reduceByKey(_+_).filter(_._2>=minSupInt)

      if(itemAddOne.count == 0){
        // No frequent (k+1)-itemsets exist: the search is complete.
        frequentSets
      } else if(maxLen >= 0 && k+1 == maxLen){
        // Reached the configured maximum itemset length; stop here.
        frequentSets union itemAddOne
      } else{
        // Recurse on the next itemset size.
        recursionFrequentSets(srcItemSet,minSupInt,itemAddOne.map(_._1),k+1,
          frequentSets union itemAddOne,maxLen)
      }
    }

    srcItemSet.cache
    if(minSupport<0) throw new IllegalArgumentException("minSupport less than 0 !!!")
    // BUG FIX: the absolute support threshold was previously derived from
    // minConfidence; it must be derived from minSupport.
    val minSupInt = math.ceil(minSupport * srcItemSet.count()).toInt

    // 1. Frequent 1-itemsets.
    val item1 = srcItemSet.flatMap(x => x).map(x=>(x,1)).reduceByKey(_+_).filter(_._2>=minSupInt).
      map(x=>(Set(x._1),x._2))

    // 2. Recursively compute all longer frequent itemsets.
    recursionFrequentSets(srcItemSet,minSupInt,item1.map(_._1),1,item1,maxLen)
  }

  /**
    * Computes all association rules meeting the confidence and lift thresholds.
    *
    * @param srcItemSet transactions, one item set per record
    * @return RDD of (antecedent, consequent, support count, confidence, lift),
    *         filtered to confidence > minConfidence and lift > 1
    */
  def getAssociationRules(srcItemSet:RDD[Set[String]]) = {

    srcItemSet.cache

    // 1. All frequent itemsets; cached because it is reused three times below.
    val frequentSets = getFrequentSets(srcItemSet)
    frequentSets.cache

    // 2. Compute confidence / lift for every candidate rule.
    val totalN = srcItemSet.count()
    val rulesItem = frequentSets.filter(_._1.size>1).flatMap { case (frequentSet, supInt) =>
      // Enumerate every non-empty proper subset of the itemset as the rule's
      // antecedent ("reason"); the complement is the consequent ("result").
      val items = frequentSet.toIndexedSeq
      val fsSize = items.size
      val rules = collection.mutable.ArrayBuffer[(Set[String],Set[String],Int)]()
      // BigInt bitmask enumeration: masks 1 .. 2^n - 2 are exactly the
      // non-empty proper subsets (works even for itemsets with > 31 items).
      for (mask <- BigInt(1) until (BigInt(1) << fsSize) - 1) {
        val reason = items.indices.filter(mask.testBit).map(items).toSet
        rules += ((reason, frequentSet -- reason, supInt))
      }
      rules
    }.map { case (reason, result, supInt) =>
      (reason,(result,supInt.toDouble))
    }.//first join: attach the antecedent's support count
      join(frequentSets).map { case (reason, ((result, sup), reasonSup)) =>
      (result,(reason,reasonSup.toDouble,sup))
    }.//second join: attach the consequent's support count
      join(frequentSets).map { case (result, ((reason, reasonSup, sup), resultSup)) =>
      // confidence = sup(X ∪ Y) / sup(X); lift = confidence / P(Y).
      val conf = sup/reasonSup
      val lift = (sup * totalN)/(reasonSup * resultSup)
      (reason,result,sup.toInt,conf,lift)
    }.//keep only rules that are both confident and positively correlated
      filter(x=> x._5 > 1 && x._4 > minConfidence)
    rulesItem
  }

  def main (args: Array[String]): Unit = {

  }

}
