package com.fudan.run;

import java.io.PrintStream;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SparkSession;

import com.fudan.cfg.RunConfiguration;
import com.fudan.cfg.base.ActionDef;
import com.fudan.cfg.base.DatasetDef;
import com.fudan.cfg.base.RunProperties;
import com.fudan.cfg.base.TransDef;
import com.fudan.ctx.EnvCtx;
import com.fudan.run.action.ActionHandler;
import com.fudan.run.dataset.DatasetHandler;
import com.fudan.run.trans.TransHandler;

/**
 * Fluent driver that executes a configured Spark job in phases:
 * environment setup, dataset (RDD) construction, transformations, and
 * terminal actions. Each phase method returns {@code this} for chaining.
 *
 * <p>Typical usage:
 * {@code new JobRunner().setCtx(out).buildRunEnv(cfg).buildDataset(cfg).doTrans(cfg).doAction(cfg).closeCtx();}
 *
 * <p>Not thread-safe: all state is held in mutable instance fields.
 */
public class JobRunner {

	private SparkConf conf;
	private SparkSession sparkSession;
	private JavaSparkContext javaSparkContext;
	// Registry of named RDDs shared across the trans/action phases.
	private VariableRddMap variableMap;

	/**
	 * Redirects the shared environment output stream used by handlers.
	 *
	 * @param printer destination for environment output
	 * @return this runner, for chaining
	 */
	public JobRunner setCtx(PrintStream printer) {
		EnvCtx.envOut = printer;
		return this;
	}

	/**
	 * Builds the Spark runtime (SparkConf, SparkSession, JavaSparkContext)
	 * from the {@code spark.*} section of the run configuration and
	 * initializes the variable map.
	 *
	 * @param runConfig configuration holding the {@code spark.*} properties
	 * @return this runner, for chaining
	 */
	public JobRunner buildRunEnv(RunConfiguration runConfig) {
		RunProperties p = runConfig.getRunProperties();
		this.conf = new SparkConf()
				.setAppName(p.getStringConfigOrDefault("spark.app-name", "default-app-name"))
				.setMaster(p.getStringConfigOrDefault("spark.master", "local[*]"));

		Map<String, Object> sparkConfig = p.getConfigMap("spark");
		if (sparkConfig != null) {
			for (Map.Entry<String, Object> entry : sparkConfig.entrySet()) {
				String key = entry.getKey();
				// app-name and master were already applied via the dedicated setters above.
				if ("app-name".equals(key) || "master".equals(key)) {
					continue;
				}
				// String.valueOf tolerates null config values instead of throwing NPE.
				this.conf.set(key, String.valueOf(entry.getValue()));
			}
		}
		this.sparkSession = SparkSession.builder().config(conf).getOrCreate();
		this.javaSparkContext = new JavaSparkContext(this.sparkSession.sparkContext());
		this.variableMap = new VariableRddMap();
		return this;
	}

	/**
	 * Builds every configured dataset into a named RDD and registers it in
	 * the variable map. Each dataset definition must carry a non-empty name.
	 *
	 * @param runConfiguration configuration holding the dataset definitions
	 * @return this runner, for chaining
	 * @throws RuntimeException if a dataset definition has no name
	 */
	public JobRunner buildDataset(RunConfiguration runConfiguration) {
		List<DatasetDef> datasetNodeList = runConfiguration.getDatasetNodeList();
		if (datasetNodeList != null) {
			for (DatasetDef datasetNode : datasetNodeList) {
				// Validate the name BEFORE the (potentially expensive) RDD build,
				// so an unnamed dataset fails fast instead of after a full build.
				if (datasetNode.getName() == null || "".equals(datasetNode.getName())) {
					throw new RuntimeException("rdd未命名: "+System.lineSeparator()+datasetNode);
				}
				JavaRDD<Map<String, Object>> javaRDD =
						DatasetHandler.build(datasetNode, runConfiguration).rdd(this, datasetNode);
				variableMap.addRdd(datasetNode.getName(), javaRDD);
			}
		}
		return this;
	}

	/**
	 * Applies every configured transformation, in order, against the
	 * registered RDDs. A missing transformation list is treated as "none".
	 *
	 * @param runConfiguration configuration holding the transformation definitions
	 * @return this runner, for chaining
	 */
	public JobRunner doTrans(RunConfiguration runConfiguration) {
		List<TransDef> transNodeList = runConfiguration.getTransNodeList();
		if (transNodeList != null) {
			for (TransDef transNode : transNodeList) {
				TransHandler.build(transNode, runConfiguration).trans(this, transNode, variableMap);
			}
		}
		return this;
	}

	/**
	 * Executes every configured terminal action (output, collect, etc.).
	 * A missing action list is treated as "none".
	 *
	 * @param runConfiguration configuration holding the action definitions
	 * @return this runner, for chaining
	 */
	public JobRunner doAction(RunConfiguration runConfiguration) {
		List<ActionDef> actionNodeList = runConfiguration.getActionNodeList();
		if (actionNodeList != null) {
			for (ActionDef actionNode : actionNodeList) {
				ActionHandler.build(actionNode, runConfiguration).action(actionNode, variableMap, runConfiguration);
			}
		}
		return this;
	}

	/**
	 * Releases the Spark resources. Closing the JavaSparkContext already
	 * stops the underlying SparkContext; closing the session afterwards is
	 * idempotent and kept for symmetry.
	 *
	 * @return this runner, for chaining
	 */
	public JobRunner closeCtx() {
		if (this.javaSparkContext != null) {
			this.javaSparkContext.close();
		}
		if (this.sparkSession != null) {
			this.sparkSession.close();
		}
		return this;
	}

	/** @return the registry of named RDDs built so far */
	public VariableRddMap getVariableMap() {
		return variableMap;
	}

	/** @return the active SparkSession, or {@code null} before buildRunEnv */
	public SparkSession getSparkSession() {
		return sparkSession;
	}

	public void setSparkSession(SparkSession sparkSession) {
		this.sparkSession = sparkSession;
	}

	/** @return the active JavaSparkContext, or {@code null} before buildRunEnv */
	public JavaSparkContext getJavaSparkContext() {
		return javaSparkContext;
	}

	public void setJavaSparkContext(JavaSparkContext javaSparkContext) {
		this.javaSparkContext = javaSparkContext;
	}
}
