package com.algo.url

import org.apache.spark._
import org.apache.spark.broadcast
import org.apache.spark.SparkContext._
import org.apache.spark.rdd.RDD

import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable.HashMap
import scala.collection.mutable.HashSet

import com.algo.utils.PanelUtils
import tw.edu.ntu.csie.liblinear._

object Main {

  /** Number of partitions used when repartitioning training data before model validation. */
  val modelPartitions = 20

  /**
   * Validates a logistic-regression model (liblinear "-s 0", 5-fold) on dense
   * word2vec panel features.
   *
   * @param args args(0) and args(1) are input paths forwarded to
   *             PanelFeature.getWord2VecFeature — exact meaning defined there.
   */
  def testWord2Vec(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("TA")
    val sc = new SparkContext(conf)

    // step 1: get panel feature middle data.
    // Element shape (inferred from usage below): (key, (featureVector, labelCsv))
    // — TODO confirm against PanelFeature.getWord2VecFeature.
    val panelFeatureMid = PanelFeature.getWord2VecFeature(args(0), args(1), sc).cache

    // step 2: parse the comma-separated label string into a label list,
    // keeping the feature vector alongside: (key, (labels, features))
    val panelFeature = panelFeatureMid.map { tp =>
      val items = tp._2._2.split(",")
      val labels = PanelUtils.parsePanelLabel(items)
      (tp._1, (labels, tp._2._1))
    }

    // word2vec vectors are fixed-width here: feature indices 0..199.
    // (Was a shadowed `for(i <- 0 to 199) yield i`; same array, clearer name.)
    val featureIndices = (0 to 199).toArray

    // Validate one model per label position (currently only label 0).
    for (labelIdx <- 0 to 0) {
      val trainingData = panelFeature.map { tp =>
        new DataPoint(featureIndices, tp._2._2, tp._2._1(labelIdx))
      }.repartition(modelPartitions)
      // BUGFIX: original message omitted the loop index value entirely.
      println("i = " + labelIdx + ",Accuracy = " +
        WrappedClassifierModel.validate(trainingData, "-s 0 -c 1.0", 5, true, true))
    }
    sc.stop()
  }

  /**
   * Validates one logistic-regression model per panel label (positions 1..3)
   * on sparse spid-count features: every distinct spid string across the
   * dataset is assigned a global integer index, and each record's spid map is
   * re-keyed to those indices before training.
   *
   * @param args args(0) and args(1) are input paths forwarded to
   *             PanelFeature.getPanelFeatureMid — exact meaning defined there.
   */
  def testUrlClassifier(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("TA")
    val sc = new SparkContext(conf)

    // step 1: get panel feature middle data.
    // Element shape (inferred from usage): (key, (spidCounts: Map[String, _], labelCsv))
    // — TODO confirm against PanelFeature.getPanelFeatureMid.
    val panelFeatureMid1 = PanelFeature.getPanelFeatureMid(args(0), args(1), sc).cache

    // step 2: collect the distinct spid strings and broadcast them.
    // reduceByKey((a, b) => 1) is only a dedup; the value is discarded.
    val spidSet = panelFeatureMid1.mapPartitions { it =>
      val seen = it.foldLeft(HashSet.empty[String])((acc, elem) => acc.union(elem._2._1.keySet))
      seen.iterator.map(s => (s, 1))
    }.reduceByKey((a, b) => 1).map(_._1).collect
    val indexSet = sc.broadcast(spidSet)

    // Re-key each record's spid map from string to its broadcast-array position.
    // zipWithIndex.toMap reproduces the original mutable-HashMap counter exactly.
    val panelFeatureMid = panelFeatureMid1.mapPartitions { it =>
      val indexMap = indexSet.value.zipWithIndex.toMap
      it.map(s => (s._1, (s._2._2, s._2._1.map(spid => (indexMap(spid._1), spid._2)))))
    }.cache

    // step 3: split the label CSV into a label list: (key, (labels, indexedFeatures))
    val panelFeature = panelFeatureMid.map { tp =>
      val items = tp._2._1.split(",")
      val labels = PanelUtils.parsePanelLabel(items)
      (tp._1, (labels, tp._2._2))
    }

    for (labelIdx <- 1 to 3) {
      val trainingData = panelFeature.map { tp =>
        // BUGFIX: the original sorted keySet but took `values` in map iteration
        // order, misaligning index[i] <-> value[i] in the DataPoint. Sorting
        // the (key, value) entries together keeps the pairs aligned.
        val sortedPairs = tp._2._2.toList.sortBy(_._1)
        new DataPoint(sortedPairs.map(_._1).toArray, sortedPairs.map(_._2).toArray, tp._2._1(labelIdx))
      }.repartition(modelPartitions)
      // BUGFIX: original message omitted the loop index value entirely.
      println("i = " + labelIdx + ",Accuracy = " +
        WrappedClassifierModel.validate(trainingData, "-s 0 -c 1.0", 5, true, true))
    }
    sc.stop()
  }

  /** Entry point: currently runs the word2vec variant; the URL-classifier path is kept disabled. */
  def main(args: Array[String]): Unit = {
    //testUrlClassifier(args)
    testWord2Vec(args)
  }
}
