package com.fudan.run.dataset;

import java.io.Serializable;
import java.util.Map;

import org.apache.spark.api.java.JavaRDD;

import com.fudan.cfg.RunConfiguration;
import com.fudan.cfg.base.DatasetDef;
import com.fudan.run.JobRunner;
import com.fudan.run.ctx.DatasetContext;

/**
 * Base class for dataset handlers: each concrete handler knows how to turn one
 * kind of {@link DatasetDef} node into a Spark RDD of row maps.
 *
 * <p>Serializable because instances are shipped to Spark executors.</p>
 *
 * @param <T> the concrete dataset-definition type this handler accepts
 */
public abstract class DatasetHandler<T extends DatasetDef> implements Serializable{

	// Explicit serialVersionUID so serialized handlers stay compatible across builds.
	private static final long serialVersionUID = 1L;

	/**
	 * Produces the RDD of rows (column name -> value) for the given dataset node.
	 *
	 * @param runContext  the job runner providing the Spark/runtime context
	 * @param datasetNode the dataset definition to materialize
	 * @return an RDD where each element is one row as a column-name-to-value map
	 */
	public abstract JavaRDD<Map<String,Object>> rdd(JobRunner runContext,T datasetNode);

	/**
	 * Looks up and instantiates the handler registered for the dataset node's type.
	 *
	 * @param datasetNode      the dataset definition whose type selects the handler
	 * @param runConfiguration configuration holding the type-to-handler registry
	 * @return a new handler instance for the node's type
	 * @throws RuntimeException if no handler is registered for the type, or if the
	 *                          handler class cannot be instantiated reflectively
	 */
	@SuppressWarnings("unchecked") // registry stores DatasetHandler<? extends DatasetDef>; cast narrowed for callers
	public static DatasetHandler<? super DatasetDef> build(DatasetDef datasetNode,RunConfiguration runConfiguration) {
		String type = datasetNode.getType();
		DatasetContext datasetContext = runConfiguration.getDatasetContext();
		Class<? extends DatasetHandler<? extends DatasetDef>> handlerClz = datasetContext.getHandler(type);
		if(handlerClz == null) {
			throw new RuntimeException(String.format("没有匹配的dataset的处理器, type:%s",type));
		}
		try {
			// getDeclaredConstructor().newInstance() replaces the deprecated
			// Class.newInstance(), which silently rethrows checked exceptions thrown
			// by the constructor and would bypass the catch below.
			return (DatasetHandler<? super DatasetDef>) handlerClz.getDeclaredConstructor().newInstance();
		} catch (ReflectiveOperationException e) {
			// Covers NoSuchMethod/Instantiation/IllegalAccess/InvocationTarget; cause preserved.
			throw new RuntimeException(String.format("构建dataset的处理器失败, type:%s",type),e);
		}
	}
}
