package edu.uw.nemo.esu;

import edu.uw.nemo.io.Parser;
import edu.uw.nemo.model.Mapping;
import edu.uw.nemo.model.Metrics;
import org.apache.log4j.Logger;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.broadcast.Broadcast;

import java.io.IOException;
import java.net.URISyntaxException;
import java.util.*;

/**
 * Spark environment setup for ESU
 *
 * Created by joglekaa on 9/9/14.
 */
public class SparkGenerator {

    // final: the logger is a per-class constant (log4j Loggers are thread-safe),
    // and padding is a fixed banner used to make progress lines easy to grep.
    static final Logger logger = Logger.getLogger(SparkGenerator.class);
    static final String padding = "********************************************************";

    /**
     * CLI entry point.
     *
     * <p>Expected arguments: {@code <inputFile> <k> <nodes> [outputFile]} —
     * the graph file to parse, the motif size, the cluster node count used to
     * derive the partition count, and an optional output file name.
     *
     * @throws IOException        if the input graph cannot be read
     * @throws URISyntaxException if the input path is not a valid URI
     */
    public static void main(String[] args) throws IOException, URISyntaxException {
        // Fail fast with a usage message instead of an ArrayIndexOutOfBoundsException.
        if (args.length < 3) {
            System.err.println("Usage: SparkGenerator <inputFile> <k> <nodes> [outputFile]");
            return;
        }
        System.out.println(padding + " SMR start main");
        long start = System.currentTimeMillis();
        SparkGenerator sparkGenerator = new SparkGenerator();
        // Fourth argument (output file) is optional; null means "no output file".
        String outputFileName = (args.length > 3) ? args[3] : null;
        Metrics metrics = sparkGenerator.extract(args[0], Integer.parseInt(args[1]), Integer.parseInt(args[2]), outputFileName);
        long end = System.currentTimeMillis();
        metrics.addMetric(Metrics.TOTAL_DURATION, (end - start));
        metrics.print();
        System.out.println(padding + " SMR done main. Enumerated [" + metrics.get(Metrics.COUNT) + "] motifs in [" + metrics.get(Metrics.ENUMERATION_DURATION) + "] milliseconds.");
    }

    /**
     * Convenience entry point for a specific cluster run: hard-codes the input
     * file, motif size (k = 3), partition count (113), and output file name,
     * taking only {@code appName} ({@code args[0]}) and the Spark master URL
     * ({@code args[1]}) from the caller.
     */
    public void wrapper(String[] args) throws IOException, URISyntaxException {
        extract("full_scere_20140427.csv", 3, args[0], args[1], 113, "cluster_run_20150411_2");
    }

    /**
     * Convenience entry point with a hard-coded output file name; all other
     * parameters (file, k, appName, master, partitions) come from {@code args}.
     */
    public static void wrapper2(String[] args) throws IOException, URISyntaxException {
        SparkGenerator sparkGenerator = new SparkGenerator();
        sparkGenerator.extract(args[0], Integer.parseInt(args[1]), args[2], args[3], Integer.parseInt(args[4]), "cluster_run_20150411_2");
    }

    /**
     * Runs the ESU enumeration with an explicitly configured Spark context.
     *
     * @param fileName       input graph file
     * @param k              motif (subgraph) size
     * @param appName        Spark application name
     * @param master         Spark master URL
     * @param partitions     number of RDD partitions to use
     * @param outputFileName optional output file name (may be null)
     * @return the collected run metrics
     */
    public Metrics extract(String fileName, int k, String appName, String master, int partitions, String outputFileName) throws IOException, URISyntaxException {
        logger.debug(padding + " SMR start extract - with params");
        logger.debug(padding + " SMR start loading context");
        SparkConf sparkConf = new SparkConf().setAppName(appName).setMaster(master);

//        configureKryo(sparkConf);

        JavaSparkContext sc = new JavaSparkContext(sparkConf);
        logger.debug(padding + " SMR done loading context");

        try {
            Metrics metrics = runOnSpark(fileName, sc, k, partitions, outputFileName);
            logger.debug(padding + " SMR done");
            return metrics;
        } finally {
            // Guarantee the context is released even if parsing/enumeration throws.
            // stop() is idempotent, so the timing-relevant stop inside runOnSpark
            // on the success path is unaffected.
            sc.stop();
        }
    }

    /**
     * Runs the ESU enumeration using the cluster-provided Spark configuration
     * (no master is set here; it comes from spark-submit). The partition count
     * is derived as {@code nodes * max(k, 3)}.
     *
     * @param fileName       input graph file
     * @param k              motif (subgraph) size
     * @param nodes          number of cluster nodes, used to size partitioning
     * @param outputFileName optional output file name (may be null)
     * @return the collected run metrics, including K and N
     */
    public Metrics extract(String fileName, int k, int nodes, String outputFileName) throws IOException, URISyntaxException {
        logger.debug(padding + " SMR start extract - just filename");
        logger.debug(padding + " SMR start loading context");
        SparkConf sparkConf = new SparkConf().setAppName("SparkySimpleMapReduce");

//        configureKryo(sparkConf);

        JavaSparkContext sc = new JavaSparkContext(sparkConf);

        int partitions = nodes * ( ( k > 3) ? k : 3 );
        logger.debug(padding + " SMR done loading context");
        logger.debug(padding + " extracting motifs having " + k + " vertices, with " + nodes + " nodes, using " + partitions + " partitions.");

        try {
            Metrics metrics = runOnSpark(fileName, sc, k, partitions, outputFileName);
            metrics.addMetric(Metrics.K, k);
            metrics.addMetric(Metrics.N, nodes);

            logger.debug(padding + " SMR done");
            return metrics;
        } finally {
            // Guarantee the context is released even if parsing/enumeration throws
            // (stop() is idempotent; see the other extract overload).
            sc.stop();
        }
    }

    /**
     * Parses the graph, broadcasts its adjacency map, and runs the distributed
     * ESU subgraph count over the parallelized vertex list.
     *
     * @return metrics from the count, augmented with load and enumeration durations
     */
    private Metrics runOnSpark(String fileName, JavaSparkContext sc, int k, int partitions, String outputFileName) throws IOException, URISyntaxException {
        logger.debug(padding + " starting run on spark using [" + sc.getConf().get("spark.executor.cores", "1") + "] executors.");

        long parseStart = System.currentTimeMillis();
        Parser parser = new Parser();
        Mapping mapping = parser.parse(fileName, sc);

        List<Integer> vertices = mapping.getVertices(k, partitions);

        JavaRDD<Integer> rdd = sc.parallelize(vertices, partitions);

        // The adjacency map is read-only on the executors, so broadcast it once
        // instead of shipping it with every task.
        Map<Integer, Set<Integer>> adj = mapping.getAdj();
        Broadcast<Map<Integer, Set<Integer>>> broadcastAdj = sc.broadcast(adj);
        long dataStaged = System.currentTimeMillis();

        ESUCounter gen = new ESUCounter(new ESUGen());
        Metrics metrics = gen.countSubgraphs(broadcastAdj, rdd, k, outputFileName);

        logger.debug(padding + " external count = " + metrics.get(Metrics.COUNT));
        // Stop before taking the enumeration timestamp so shutdown time is
        // included in ENUMERATION_DURATION, matching the original measurement.
        sc.stop();
        long enumerationComplete = System.currentTimeMillis();
        metrics.addMetric(Metrics.LOAD_DURATION, (dataStaged - parseStart));
        metrics.addMetric(Metrics.ENUMERATION_DURATION, (enumerationComplete - parseStart));

        return metrics;
    }

}
