package edu.xidian.sei.local

import java.text.SimpleDateFormat
import java.util.Date

import akka.actor.{Actor, ActorLogging}
import edu.xidian.sei.util.Stopwatch
import edu.xidian.sei._

// Message requesting a report; neither field is read in this file —
// presumably consumed by another actor. NOTE(review): confirm against senders.
case class ReportRequest(filePrefix: String, dataSize: Int)

/**
 * Progress snapshot: how many data items have been received so far
 * versus how many are expected in total.
 */
case class CollectProcess(receivedDataCount: Int, expectedDataCount: Int) {
  /** True once every expected data item has arrived. */
  def finished: Boolean = receivedDataCount == expectedDataCount
}

// Query message: asks the collector to reply with its current
// CollectProcess snapshot (see ResultCollector.receive).
case object ProcessRequest

/**
 * Actor that collects (grid, clusterID) assignments from workers until
 * `expectedDataCount` items have arrived, then reports the elapsed time.
 *
 * @param reporter          sink for the clustering result; only a
 *                          FileReporter's fileName is printed (see tryReport)
 * @param expectedDataCount number of (grid, clusterID) pairs to wait for
 */
class ResultCollector(reporter: Reporter, expectedDataCount: Int) extends Actor with ActorLogging {
  // Reverse index: each grid object -> the cluster it currently belongs to.
  val data2Cluster = new collection.mutable.HashMap[Grid, DefaultCluster]
  // All live clusters, keyed by cluster id.
  val clusterMap = new collection.mutable.HashMap[Index, DefaultCluster]
  var receivedDataCount = 0

  // Wall-clock start of the collection phase, for elapsed-time reporting.
  val start = System.nanoTime()

  // Actor processes one message at a time, so sharing one formatter is safe
  // even though SimpleDateFormat itself is not thread-safe. Hoisted out of
  // tryReport to avoid allocating it on every received message.
  private val timestampFormat = new SimpleDateFormat("yyyy.MM.dd-HH:mm:ss.SSS")

  /**
   * Folds `cluster2` into `cluster1`: removes cluster2 from the registry,
   * merges its statistics and objects into cluster1, and re-points every
   * migrated object at cluster1 in the reverse index.
   */
  private def miniClusterMerge(cluster1: DefaultCluster, cluster2: DefaultCluster): Unit = {
    clusterMap.remove(cluster2.id)
    cluster1.merge(cluster2)

    cluster1.objects ++= cluster2.objects
    cluster2.objects foreach { o => data2Cluster.put(o, cluster1) }
  }

  def receive = {
    case (grid: Grid, clusterID: Index) =>
      // BUGFIX: the original created DefaultCluster(clusterID) for unseen ids
      // but never inserted it into clusterMap, so the registry stayed empty
      // and every grid got its own throwaway cluster. getOrElseUpdate both
      // looks up and registers atomically.
      val cluster = clusterMap.getOrElseUpdate(clusterID, DefaultCluster(clusterID))
      cluster.objects.add(grid)
      data2Cluster.put(grid, cluster)
      receivedDataCount += 1
      //log.info(s"receive data  $receivedDataCount/$expectedDataCount using ${Stopwatch.format(System.nanoTime - start, 4)}")
      tryReport()

    case ProcessRequest =>
      sender() ! CollectProcess(receivedDataCount, expectedDataCount)
      if (receivedDataCount == expectedDataCount) {
        context.stop(self)
      }

    case _ => throw new RuntimeException("Unknown message type")
  }

  /** Prints/logs the final timing once every expected item has arrived. */
  private def tryReport(): Unit = {
    if (receivedDataCount == expectedDataCount) {
      //mergeCluster
      //println("clusters size:" + clusterSet.size)
      //reporter.report(data2Cluster)
      val elapse = System.nanoTime() - start
      // Only FileReporter carries a file name; pattern match instead of the
      // original asInstanceOf, which threw ClassCastException for any other
      // Reporter implementation.
      val label = reporter match {
        case fr: FileReporter => fr.fileName
        case other            => other.toString
      }
      println(label + ":" + timestampFormat.format(new Date))
      log.debug(s"clustering $expectedDataCount matched data using:" + Stopwatch.format(elapse, 4))
    }
  }

  /**
   * Merges low-density "mini" clusters into their nearest high-density
   * cluster. The density cut-off is chosen at the largest relative density
   * drop between adjacent clusters in the density-sorted list.
   * No-op with fewer than 3 clusters.
   * NOTE(review): `> 2` also skips the 2-cluster case — confirm intended.
   */
  private def mergeCluster(): Unit = {
    if (clusterMap.size > 2) {
      for (cluster <- clusterMap.values) {
        cluster.compute
      }
      val clusterSortedList = clusterMap.values.toList.sortWith((cluster1, cluster2) => cluster1.density > cluster2.density)
      // Debug output: densities of the densest clusters (at most 10).
      clusterSortedList.take(10).foreach(c => println("density:" + c.density))
      println(clusterSortedList(0).objects.size)
      //      clusterSortedList(0).objects.foreach(g => {
      //        val grid = g.asInstanceOf[Grid]
      //        println("index " + grid.id.dimensions.mkString(",") + " " + grid.density)
      //      })
      // Relative density drop between each adjacent pair, keyed by the
      // lower-density cluster of the pair.
      val adjacentDifference = clusterSortedList.zip(clusterSortedList.tail).map(p => (p._2, (p._1.density - p._2.density) / p._2.density))
      // Largest drop marks the cut point (first maximum wins, as before).
      val max = adjacentDifference.foldLeft((clusterSortedList(0), BigDecimal(0))) {
        (best, p) => if (p._2 > best._2) p else best
      }
      // Clusters at or above the cut density survive; the rest are merged
      // into whichever surviving cluster has the closest center.
      val bigClusters = clusterSortedList.filter(x => x.density >= max._1.density)
      val miniclusters = clusterSortedList.filter(x => x.density < max._1.density)
      miniclusters.foreach { minicluster =>
        val distances = bigClusters.map { bigCluster => (Location.twoArrayDistance(bigCluster.center, minicluster.center), bigCluster) }
        val firstCluster = distances.sortWith((distance1, distance2) => distance1._1 < distance2._1)(0)._2
        miniClusterMerge(firstCluster, minicluster)
        //        delete(minicluster)
      }
    }
  }

}