package com.hadwinling.alogriithm.egone

import breeze.linalg.sum
import breeze.numerics.pow
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

import scala.util.Random

object egone {
  // --- PSO hyper-parameters -------------------------------------------------
  val c1 = 1.5 // cognitive acceleration factor (pull toward personal best)
  val c2 = 1.5 // social acceleration factor (pull toward global best)
  var w = 0.8  // inertia weight

  val maxgen = 200  // number of generations (iterations)
  val sizepop = 100 // swarm (population) size

  // Velocity bounds.
  val Vmax = 10
  val Vmin = -10

  // Position (search-space) bounds.
  val popmax = 20
  val popmin = -20

  val dim = 10 // dimensionality of the search space

  // Global best found so far: (fitness, (position, velocity)).
  // Starts at +Infinity so any real particle improves on it.
  var gb: (Double, (Array[Double], Array[Double])) = (Double.PositiveInfinity, (Array.fill(dim)(0.0), Array.fill(dim)(0.0)))
  var t = 0 // loop counter

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local").setAppName("geone")
    val sc = new SparkContext(conf)

    // Initialise the swarm: (particle id, (position, velocity, personal-best position)).
    // BUG FIX: the original used List.range(maxgen, sizepop) = List.range(200, 100),
    // which is EMPTY — no particles were created and take(1)(0) downstream would throw.
    // Particle ids must range over 0 until sizepop.
    val intiRDD: RDD[(Int, (Array[Double], Array[Double], Array[Double]))] =
      sc.parallelize(List.range(0, sizepop), 4)
        .map { num =>
          // Sample positions uniformly in [popmin, popmax] and velocities in
          // [Vmin, Vmax] so the initial swarm actually covers the declared
          // search space (the original sampled everything from [0, 1)).
          val pos = Array.fill(dim)(popmin + Random.nextDouble() * (popmax - popmin))
          val vel = Array.fill(dim)(Vmin + Random.nextDouble() * (Vmax - Vmin))
          // A particle's personal best starts at its initial position
          // (the original used a third unrelated random array).
          (num, (pos, vel, pos.clone()))
        }

    // Attach fitness: (id, (pos, vel, fitness, pbPos, pbFitness)).
    val particles_fitness: RDD[(Int, (Array[Double], Array[Double], Double, Array[Double], Double))] =
      intiRDD.mapPartitions(addFitness).persist()

    val particlesBC: Broadcast[collection.Map[Int, (Array[Double], Array[Double], Array[Double])]] =
      sc.broadcast(particles_fitness.map(x => (x._1, (x._2._1, x._2._2, x._2._4))).collectAsMap)

    // Global best = particle with the minimum fitness. reduce is a single O(n)
    // pass with no shuffle, unlike the original sortByKey(true).take(1) which
    // sorted the whole RDD just to read one element.
    gb = particles_fitness
      .map(x => (x._2._3, (x._2._1, x._2._2)))
      .reduce((a, b) => if (a._1 <= b._1) a else b)

    val gbBC: Broadcast[(Double, (Array[Double], Array[Double]))] = sc.broadcast(gb)
    t = t + 1

    // NOTE(review): the main PSO update loop (velocity/position updates over
    // maxgen generations) is not present in this view — only one setup pass runs.
    sc.stop() // release the local Spark context
  }

  /** Sphere benchmark function: f(x) = Σ xᵢ². Global minimum 0 at the origin.
    * Rewritten with the stdlib — Breeze's sum/pow were overkill for this.
    */
  def functions(pos: Array[Double]): Double =
    pos.map(x => x * x).sum

  /** For each particle, compute the current fitness and the personal-best fitness.
    *
    * Input:  (id, (pos, vel, pbPos))
    * Output: (id, (pos, vel, fitness, pbPos, pbFitness))
    */
  def addFitness(particle: Iterator[(Int, (Array[Double], Array[Double], Array[Double]))])
      : Iterator[(Int, (Array[Double], Array[Double], Double, Array[Double], Double))] =
    particle.map { case (id, (pos, vel, pbPos)) =>
      (id, (pos, vel, functions(pos), pbPos, functions(pbPos)))
    }
}
