package FPGrowth

import java.io.File
import java.nio.charset.StandardCharsets
import java.util

import org.apache.commons.io.FileUtils

/**
 * Reads `topics/topics.csv` (rows of `key,topic,count`) and writes
 * `topics/FPGrowthList.csv` with one line per key: `key|topic1,topic2,...`,
 * keeping only topics whose count column is non-zero and de-duplicating
 * topics per key. Intended as a pre-processing step for FP-Growth input.
 */
object perProc {

  // Topic id -> topic name lookup, loaded once at object initialization.
  // NOTE(review): populated here but never read in this file — presumably
  // consumed by other code in the project; kept for compatibility.
  val maps = new util.HashMap[String, String]()
  // Explicit UTF-8: the no-charset FileUtils overloads are deprecated and
  // use the platform default encoding, which varies across machines.
  FileUtils.readLines(new File("topics/topicsName.csv"), StandardCharsets.UTF_8).forEach(
    line => {
      val sts = line.split(",")
      // Guard against short/malformed rows instead of throwing AIOOBE.
      if (sts.length >= 2) maps.put(sts(0), sts(1))
    }
  )

  def main(args: Array[String]): Unit = {

    System.setProperty("hadoop.home.dir", new File("").getAbsolutePath)
    val inputFile1: String = "topics/topics.csv"

    // key -> ordered, de-duplicated set of topics with a non-zero count.
    // A LinkedHashSet per key replaces the old tmp/set scheme, which only
    // de-duplicated correctly when the input was sorted by key; this
    // suppresses duplicates even when a key reappears later in the file,
    // while preserving first-seen insertion order.
    val map = new util.LinkedHashMap[String, util.LinkedHashSet[String]]()
    FileUtils.readLines(new File(inputFile1), StandardCharsets.UTF_8).forEach(
      line => {
        val sts = line.split(",")
        // Require all three columns; skip malformed rows and zero counts.
        if (sts.length >= 3 && !sts(2).equals("0")) {
          map.computeIfAbsent(sts(0), _ => new util.LinkedHashSet).add(sts(1))
        }
      }
    )

    val ans = new StringBuilder
    map.forEach((k, v) => {
      if (!v.isEmpty) {
        // String.join replaces the manual leading-comma + substring(1) dance.
        ans ++= s"$k|${String.join(",", v)}\n"
      }
    })
    // Charset overload overwrites the file (no append), matching the old
    // write(..., false) behavior but with a deterministic encoding.
    FileUtils.write(new File("topics/FPGrowthList.csv"), ans.toString(), StandardCharsets.UTF_8)
  }
}
