package cas.ihep.spark.context;

import cas.ihep.spark.io.Packet;
import cas.ihep.spark.loop.IterationNetServer;
import cas.ihep.spark.loop.IterationServer;
import cas.ihep.spark.loop.IterativeRunnable;
import cas.ihep.spark.util.Counter;
import cas.ihep.spark.util.GlobalConfiguration;
import cas.ihep.spark.util.function.Function2;
import org.apache.spark.SparkContext;
import org.apache.spark.scheduler.SparkListener;
import org.apache.spark.scheduler.SparkListenerExecutorAdded;
import org.apache.spark.scheduler.SparkListenerJobEnd;
import org.apache.spark.scheduler.SparkListenerTaskStart;

import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

/**
 * Spark listener that drives an iterative "parameter-server" style loop alongside a Spark job.
 *
 * <p>It tracks which executor host each task lands on; once all {@code partitions} tasks have
 * started, it spawns a background thread that runs an {@link IterationNetServer} and hands it to
 * the user-supplied {@link IterativeRunnable}, which can then broadcast values to the workers and
 * aggregate their replies each iteration. When the job ends the listener deregisters itself and
 * waits for the loop thread to finish.
 *
 * <p>NOTE(review): this class assumes a single job with exactly {@code partitions} tasks and that
 * {@code onExecutorAdded} fires before any task starts on that executor — confirm against the
 * scheduling guarantees of the Spark version in use.
 *
 * @param <Send>    type of values broadcast to the workers each iteration
 * @param <Receive> type of values received back and aggregated
 */
public final class JSparkIterationListener<Send, Receive> extends SparkListener {

    /** Port on which every executor host is expected to listen for iteration traffic. */
    private static final int EXECUTOR_PORT = 8999;

    /** Background thread running the iteration server; assigned once in {@link #onTaskStart}. */
    private Thread child;
    private final SparkContext context;
    private final IterativeRunnable<Send, Receive> serverFunc;
    private final int partitions;
    /** executor id -> host address, populated as executors register. */
    private final Map<String, SocketAddress> executor2host;
    /** task id -> host address of the executor running it. */
    private final Map<String, SocketAddress> task2host;
    private final SocketAddress serverAddr;

    /**
     * @param sc     the Spark context this listener is registered with
     * @param lr     user loop body, invoked once the iteration server is up
     * @param p      expected number of tasks (partitions) in the job
     * @param server local address the iteration server binds to
     */
    public JSparkIterationListener(SparkContext sc, IterativeRunnable<Send, Receive> lr, int p, SocketAddress server) {
        context = sc;
        serverFunc = lr;
        partitions = p;
        executor2host = new ConcurrentHashMap<>();
        task2host = new ConcurrentHashMap<>();
        serverAddr = server;
    }

    /** Deregisters this listener and waits for the loop thread (if it was ever started). */
    @Override
    public void onJobEnd(SparkListenerJobEnd jobEnd) {
        context.removeSparkListener(this);
        // child is null if the job ended before all partitions' tasks started.
        if (child != null) {
            try {
                child.join();
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt(); // preserve the caller's interrupt status
            }
        }
    }

    /** Records the host address of each newly added executor. */
    @Override
    public void onExecutorAdded(SparkListenerExecutorAdded ea) {
        executor2host.put(ea.executorId(), new InetSocketAddress(ea.executorInfo().executorHost(), EXECUTOR_PORT));
    }

    /**
     * Records the host of each starting task; once all {@code partitions} tasks are running,
     * launches the iteration-server thread with a per-host task count.
     */
    @Override
    public void onTaskStart(SparkListenerTaskStart ts) {
        // Assumes onExecutorAdded already ran for this executor; otherwise the lookup is null
        // and ConcurrentHashMap.put would throw NPE — TODO confirm event ordering guarantee.
        task2host.put(ts.taskInfo().id(), executor2host.get(ts.taskInfo().executorId()));
        if (task2host.size() == partitions) {
            // Count how many tasks run on each host so the server knows how many acks to expect.
            final Counter<SocketAddress> numOfTasks = new Counter<>();
            for (Map.Entry<String, SocketAddress> entry : task2host.entrySet()) {
                numOfTasks.count(entry.getValue());
            }
            child = new Thread("Loop-Worker") {
                @Override
                public void run() {
                    // try-with-resources guarantees the server socket is closed on any exit path.
                    try (final IterationNetServer server = IterationNetServer.newServer(
                            GlobalConfiguration.getProperty("spark.loop.type", "udp"),
                            serverAddr, numOfTasks)) {
                        server.heartbeat();
                        serverFunc.run(new IterationServer<Send, Receive>() {
                            int iters = 0; // monotonically increasing iteration counter

                            @Override
                            public Receive broadcast(Send values, Receive zero, Function2<Receive, Receive, Receive> agg) {
                                try {
                                    return server.iterate(Packet.wrapDataRequestPacket(values, iters++), partitions, zero, agg);
                                } catch (IOException e) {
                                    // Surface I/O failures to the user loop; cause is preserved.
                                    throw new RuntimeException(e);
                                }
                            }
                        });
                        server.quit();
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            };
            child.start();
        }
    }
}
