package com.asiainfo.dacp.datastash.worker.executor.adapter.datax.translate;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import com.asiainfo.dacp.metamodel.domain.ds.repo.MetaDataSourceRepository;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.serializer.SerializerFeature;
import com.asiainfo.dacp.datastash.domain.unit.models.DataStashUnit;
import com.asiainfo.dacp.datastash.domain.unit.models.DataStashUnitStep;
import com.asiainfo.dacp.datastash.domain.unit.repos.DataStashUnitRepository;
import com.asiainfo.dacp.datastash.worker.executor.adapter.datax.model.Constant;
import com.asiainfo.dacp.datastash.worker.executor.adapter.datax.model.PluginCatalogType;
import com.asiainfo.dacp.datastash.worker.executor.adapter.datax.model.PluginType;
import com.asiainfo.dacp.datastash.worker.executor.adapter.datax.model.domain.Job;
import com.asiainfo.dacp.datastash.worker.executor.adapter.datax.model.domain.JobId;
import com.asiainfo.dacp.datastash.worker.executor.adapter.datax.model.domain.Step;
import com.asiainfo.dacp.datastash.worker.executor.adapter.datax.model.domain.StepPair;
import com.asiainfo.dacp.datastash.worker.executor.adapter.datax.model.plugin.FtpReader;
import com.asiainfo.dacp.datastash.worker.executor.adapter.datax.model.plugin.FtpWriter;
import com.asiainfo.dacp.datastash.worker.executor.adapter.datax.model.plugin.HdfsReader;
import com.asiainfo.dacp.datastash.worker.executor.adapter.datax.model.plugin.HdfsWriter;
import com.asiainfo.dacp.datastash.worker.executor.adapter.datax.model.plugin.MysqlWriter;
import com.asiainfo.dacp.datastash.worker.executor.adapter.datax.model.plugin.RdbmsReader;
import com.asiainfo.dacp.datastash.worker.executor.adapter.datax.model.plugin.RdbmsWriter;
import com.asiainfo.dacp.datastash.worker.executor.adapter.datax.model.plugin.UnStructuredReader;
import com.asiainfo.dacp.datastash.worker.executor.adapter.datax.model.plugin.UnStructuredWriter;
import com.asiainfo.dacp.datastash.worker.executor.adapter.datax.model.plugin.meta.DpMeta;
import com.asiainfo.dacp.datastash.worker.executor.adapter.datax.model.plugin.meta.FtpMeta;
import com.asiainfo.dacp.datastash.worker.executor.adapter.datax.model.plugin.meta.HdfsMeta;
import com.asiainfo.dacp.datastash.worker.executor.adapter.datax.translate.datasource.DataSourceStrategy;
import com.asiainfo.dacp.datastash.worker.executor.adapter.datax.utils.CommonErrorCode;
import com.asiainfo.dacp.datastash.worker.executor.adapter.datax.utils.Configuration;
import com.asiainfo.dacp.datastash.worker.executor.adapter.datax.utils.WorkerException;
import com.asiainfo.dacp.metamodel.domain.ds.model.MetaDataSource;
import com.google.common.base.Preconditions;
import com.google.common.base.Splitter;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;

@Service
public class JdbcTranslate extends JobTranslate {

	private static final Logger LOG = LoggerFactory.getLogger(JdbcTranslate.class);

	@Autowired
	private DataStashUnitRepository unitRepository;

	// Resolves the MetaDataSource referenced by a step configuration.
	@Autowired
	private DataSourceStrategy dsGetter;

	@Autowired
	private MetaDataSourceRepository metaDataSourceRepository;

	/**
	 * Loads the DataStashUnit for {@code id} and translates its steps into a
	 * DataX {@link Job} made of reader/writer step pairs.
	 *
	 * @param id job identifier carrying the unit code
	 * @return the assembled job
	 * @throws WorkerException if the unit cannot be loaded or contains no steps
	 */
	@Override
	public Job translate(JobId id) {
		DataStashUnit unit;
		Job job = new Job(id);

		try {
			unit = unitRepository.findByCode(id.getUnitid());
			Preconditions.checkNotNull(unit);
		} catch (Exception e) {
			throw WorkerException.asDataXException(CommonErrorCode.RUNTIME_ERROR,
					String.format("数据库获取Unid: [%s] 接口单元信息失败 .", id.getUnitid()), e);
		}
		List<DataStashUnitStep> steps = unit.getUnitSteps();
		LOG.info("获得接口单元信息： \n{}", JSON.toJSONString(unit, SerializerFeature.PrettyFormat));
		if (steps == null || steps.isEmpty()) {
			// BUGFIX: the message previously formatted steps.size(), which throws an
			// NPE when steps == null; report the unit id instead (matches message text).
			throw WorkerException.asDataXException(CommonErrorCode.RUNTIME_ERROR,
					String.format("数据库获取Unid: [%s] 接口单元步骤信息失败 .", id.getUnitid()));
		}
		job.setSteps(buildStepPair(steps));
		return job;
	}

	/**
	 * Pairs each READER step with the writer referenced by its nextStepIdx and
	 * wraps standalone DPEXECUTOR steps; other catalog types are skipped.
	 *
	 * @param steps raw unit steps (non-empty)
	 * @return the list of step pairs, never empty
	 * @throws WorkerException if a reader has no matching writer, a writer is an
	 *         unsupported plugin type, or no pair could be built at all
	 */
	public List<StepPair> buildStepPair(List<DataStashUnitStep> steps) {
		List<StepPair> pairs = Lists.newArrayList();
		// Index only "datax" steps (or untyped ones, for legacy data) for writer lookup.
		Map<String, DataStashUnitStep> stepsByIndex = Maps.newHashMap();
		for (DataStashUnitStep step : steps) {
			if ("datax".equals(step.getStepType()) || Strings.isNullOrEmpty(step.getStepType())) {
				stepsByIndex.put(step.getStepIndex(), step);
			}
		}
		for (DataStashUnitStep step : steps) {
			String stepInst = step.getStepInst();
			PluginType plugin;
			try {
				plugin = PluginType.valueOf(stepInst);
			} catch (IllegalArgumentException e) {
				LOG.warn("非法的插件类型 [{}] .", step.getId() + "-" + stepInst);
				continue;
			}
			switch (plugin.getCatalog()) {
			case READER:
				Step reader = new Step(stepInst, PluginCatalogType.READER);
				reader.setParamter(translate(step));
				DataStashUnitStep writerStep = stepsByIndex.get(step.getNextStepIdx());
				if (writerStep == null) {
					throw WorkerException.asDataXException(CommonErrorCode.RUNTIME_ERROR,
							String.format("Reader [%s] 对应的writer未获得到，检查step依赖配置 .", stepInst));
				}
				try {
					PluginType.valueOf(writerStep.getStepInst());
				} catch (IllegalArgumentException e) {
					throw WorkerException.asDataXException(CommonErrorCode.RUNTIME_ERROR,
							String.format("writer插件 [%s] 为非支持插件类型.", writerStep.getStepInst()));
				}
				Step writer = new Step(writerStep.getStepInst(), PluginCatalogType.WRITER);
				writer.setParamter(translate(writerStep));
				pairs.add(new StepPair(reader, writer));
				break;
			case DPEXECUTOR:
				Step dp = new Step(stepInst, PluginCatalogType.DPEXECUTOR);
				dp.setParamter(translate(step));
				pairs.add(new StepPair(dp));
				// FIX: this case used to fall through to default (same net effect as
				// break today, but an accident waiting to happen if cases are added).
				break;
			default:
				// Other catalog types are not translated here.
				break;
			}
		}
		if (pairs.isEmpty()) {
			throw WorkerException.asDataXException(CommonErrorCode.RUNTIME_ERROR,
					String.format("构建 Reader－Writer 步骤对失败 Step Size[%d].", steps.size()));
		}
		return pairs;
	}

	/**
	 * Translates a single unit step into the plugin-specific parameter bean that
	 * will be serialized into the DataX job configuration.
	 *
	 * @param step the unit step; its stepCfgVal must be non-null JSON
	 * @return the plugin parameter bean, or null for unhandled plugin types
	 */
	public Object translate(DataStashUnitStep step) {
		String stepInst = step.getStepInst();

		LOG.info("获得 {} 步骤信息： \n{}", step.getStepLabel(),
				JSON.toJSONString(step.getStepCfgVal(), SerializerFeature.PrettyFormat));

		Preconditions.checkNotNull(step.getStepCfgVal(), String.format("插件%s 配置信息为空. ", step.getStepInst()));
		Configuration unitConf = Configuration.from(step.getStepCfgVal());
		unitConf.set("pluginType", stepInst);

		Serializable obj = null;
		switch (PluginType.valueOf(stepInst)) {
		case mysqlreader:
		case oraclereader:
		case db2reader:
		case hivereader:
		case rdbmsreader:
			obj = buildRdbmsReader(unitConf);
			break;
		case mysqlwriter:
			obj = buildMysqlWriter(unitConf);
			break;
		case oraclewriter:
		case db2writer:
		case rdbmswriter:
			obj = buildRdbmsWriter(unitConf);
			break;
		case ftpreader:
			obj = buildFtpReader(unitConf);
			break;
		case hdfsreader:
			obj = buildHdfsReader(unitConf);
			break;
		case txtfilereader:
			obj = buildUnStructuredReader(unitConf);
			break;
		case ftpwriter:
			obj = buildFtpWriter(unitConf);
			break;
		case hdfswriter:
			obj = buildHdfsWriter(unitConf);
			break;
		case txtfilewriter:
			obj = buildUnStructuredWriter(unitConf);
			break;
		case dpexcute:
			obj = buildDp(unitConf);
			break;
		default:
			// Plugin types without a builder yield null; callers must tolerate it.
			break;
		}
		return obj;
	}

	/** Builds the DP-executor parameter object from the step configuration. */
	private Serializable buildDp(Configuration unitConf) {
		return buildDpMeta(unitConf);
	}

	/** FTP writer = unstructured-writer settings + FTP connection metadata. */
	private Serializable buildFtpWriter(Configuration unitConf) {
		FtpWriter ftpWriter = new FtpWriter(buildUnStructuredWriter(unitConf));
		MetaDataSource ftpDs = dsGetter.getDataSource(unitConf);
		FtpMeta ftpMeta = buildFtpMeta(ftpDs);
		// Spring BeanUtils: copies ftpMeta (source) onto ftpWriter (target).
		BeanUtils.copyProperties(ftpMeta, ftpWriter);
		return ftpWriter;
	}

	/** MySQL writer = generic RDBMS writer + writeMode (defaults to "insert"). */
	private Serializable buildMysqlWriter(Configuration conf) {
		MysqlWriter mysqlWriter = new MysqlWriter(buildRdbmsWriter(conf));
		mysqlWriter.setWriteMode(conf.getString("writeMode", "insert"));
		return mysqlWriter;
	}

	/** FTP reader = unstructured-reader settings + FTP metadata + traversal depth. */
	public FtpReader buildFtpReader(Configuration conf) {
		FtpReader ftp = new FtpReader(buildUnStructuredReader(conf));
		MetaDataSource ftpDs = dsGetter.getDataSource(conf);
		FtpMeta ftpMeta = buildFtpMeta(ftpDs);
		BeanUtils.copyProperties(ftpMeta, ftp);
		ftp.setMaxTraversalLevel(conf.getInt("maxTraversalLevel"));
		return ftp;
	}

	/** HDFS reader = unstructured-reader settings + HDFS connection metadata. */
	public HdfsReader buildHdfsReader(Configuration conf) {
		HdfsReader hdfs = new HdfsReader(buildUnStructuredReader(conf));
		MetaDataSource hdfsDs = dsGetter.getDataSource(conf);
		HdfsMeta hdfsMeta = buildHdfsMeta(hdfsDs, conf);
		BeanUtils.copyProperties(hdfsMeta, hdfs);
		return hdfs;
	}

	/** HDFS writer = unstructured-writer settings + typed columns + HDFS metadata. */
	private Serializable buildHdfsWriter(Configuration conf) {
		HdfsWriter hdfs = new HdfsWriter(buildUnStructuredWriter(conf));
		MetaDataSource hdfsDs = dsGetter.getDataSource(conf);
		HdfsMeta hdfsMeta = buildHdfsMeta(hdfsDs, conf);

		// instanceof is null-safe, so the explicit null check was redundant.
		Object colobj = conf.get("column");
		if (colobj instanceof List) {
			@SuppressWarnings("unchecked")
			List<Object> listobj = (List<Object>) colobj;
			hdfs.setColumn(buildHdfsWriterCols(listobj));
		}

		BeanUtils.copyProperties(hdfsMeta, hdfs);
		return hdfs;
	}

	/**
	 * Builds a generic RDBMS reader: resolves the data source (account,
	 * credentials, JDBC url) and copies column/where/fetchSize/table/querySql
	 * from the step configuration.
	 *
	 * @throws WorkerException if the data source configuration lacks "url"
	 */
	public RdbmsReader buildRdbmsReader(Configuration conf) {
		RdbmsReader rdbmsReader = new RdbmsReader();

		MetaDataSource dbDs = dsGetter.getDataSource(conf);
		Configuration dsConf = Configuration.from(dbDs.getDsConf());
		String jdbcUrl = dsConf.getNecessaryValue("url", CommonErrorCode.CONFIG_ERROR);
		rdbmsReader.initDataSource(dbDs.getDsAcct(), dbDs.getDsAuth(), jdbcUrl);

		Splitter comma = Splitter.on(',');
		rdbmsReader.setColumn(Lists.newArrayList(comma.split(conf.getString("column", "*"))));
		rdbmsReader.setWhere(conf.getString("where"));
		rdbmsReader.setFetchSize(conf.getInt("fetchSize"));
		if (!Strings.isNullOrEmpty(conf.getString("table"))) {
			rdbmsReader.setTable(Lists.newArrayList(conf.getString("table")));
		}
		// BUGFIX: only set querySql when present; previously a missing querySql
		// produced a list containing a single null element, which downstream
		// code may mistake for a configured query.
		if (!Strings.isNullOrEmpty(conf.getString("querySql"))) {
			rdbmsReader.setQuerySql(Lists.newArrayList(conf.getString("querySql")));
		}
		return rdbmsReader;
	}

	/**
	 * Builds a generic RDBMS writer: resolves the data source and copies
	 * column/preSql/postSql/table from the step configuration.
	 *
	 * @throws WorkerException if "url" or "table" is missing
	 */
	public RdbmsWriter buildRdbmsWriter(Configuration conf) {
		RdbmsWriter rdbmsWriter = new RdbmsWriter();

		MetaDataSource dbDs = dsGetter.getDataSource(conf);
		Configuration dsConf = Configuration.from(dbDs.getDsConf());
		String jdbcUrl = dsConf.getNecessaryValue("url", CommonErrorCode.CONFIG_ERROR);
		rdbmsWriter.initDataSource(dbDs.getDsAcct(), dbDs.getDsAuth(), jdbcUrl);

		Splitter comma = Splitter.on(',');
		rdbmsWriter.setColumn(Lists.newArrayList(comma.split(conf.getString("column", "*"))));
		String postSql = conf.getString("postSql");
		if (!Strings.isNullOrEmpty(postSql)) {
			rdbmsWriter.setPostSql(Lists.newArrayList(comma.split(postSql)));
		}
		String preSql = conf.getString("preSql");
		if (!Strings.isNullOrEmpty(preSql)) {
			// BUGFIX: preSql was previously stored via setPostSql, silently
			// clobbering any configured postSql and never applying preSql.
			rdbmsWriter.setPreSql(Lists.newArrayList(comma.split(preSql)));
		}
		rdbmsWriter.setTable(Lists.newArrayList(conf.getNecessaryValue("table", CommonErrorCode.CONFIG_ERROR)));
		return rdbmsWriter;
	}

	/**
	 * Builds the shared settings for unstructured (file-based) readers:
	 * columns, delimiter, compression, encoding, header handling and paths.
	 */
	@SuppressWarnings("unchecked")
	public UnStructuredReader buildUnStructuredReader(Configuration conf) {
		UnStructuredReader reader = new UnStructuredReader();

		Object colobj = conf.get("column");
		// BUGFIX: the branches are now mutually exclusive; previously a null
		// "column" entry invoked processColumn("*") twice (once from the
		// null/String check and again from the trailing else).
		if (colobj instanceof List) {
			List<Object> listobj = (List<Object>) colobj;
			if (listobj.isEmpty() || "*".equals(listobj.get(0).toString())) {
				reader.processColumn(Lists.newArrayList("*"));
			} else {
				reader.processColumn(buildReaderCols(listobj));
			}
		} else {
			// null, a String, or any other shape → read all columns.
			reader.processColumn(Lists.newArrayList("*"));
		}
		String fieldDelimiter = conf.getString("fieldDelimiter");
		if (!Strings.isNullOrEmpty(fieldDelimiter)) {
			reader.setFieldDelimiter(fieldDelimiter.trim());
		}
		reader.setCompress(conf.getString("compress"));
		reader.setEncoding(conf.getString("encoding"));
		// NOTE(review): skipHeader == 0 (the default) maps to true here, which
		// looks inverted — confirm the intended semantics before changing.
		reader.setSkipHeader(conf.getInt("skipHeader", 0) == 0);
		reader.setNullFormat(conf.getString("nullFormat"));
		reader.setCsvReaderConfig(conf.getMap("csvReaderConfig"));

		// External path substitution from the upstream task: 0 = no, 1 = yes.
		boolean isRelyPre = conf.getInt("isRelyPre", 0) != 0;
		if (isRelyPre) {
			List<Configuration> list = Constant.GLOBAL_CONF.getListConfiguration("taskContent");
			Preconditions.checkArgument(list != null && !list.isEmpty(), "外部参数taskContent为空 .");
			List<String> paths = Lists.newArrayList();
			for (Configuration path : list) {
				paths.add(path.getString("content"));
				// Last entry wins for dsName; mirrors the original behavior.
				conf.set("dsName", path.getString("dsName"));
			}
			reader.setPath(Lists.newArrayList(paths));
		} else {
			reader.processPath(conf.getNecessaryValue("path", CommonErrorCode.CONFIG_ERROR));
		}

		return reader;
	}

	/**
	 * Converts raw column maps into reader columns: entries with a "value"
	 * become constant columns, otherwise "index" selects a source field.
	 * Non-Map entries are ignored.
	 */
	private List<UnStructuredReader.Column> buildReaderCols(List<Object> listobj) {
		List<UnStructuredReader.Column> cols = Lists.newArrayList();
		for (Object obj : listobj) {
			if (!(obj instanceof Map)) {
				continue;
			}
			@SuppressWarnings("rawtypes")
			Map mapobj = (Map) obj;
			String coltype = (String) mapobj.get("type");
			String colvalue = (String) mapobj.get("value");
			String format = (String) mapobj.get("format");
			if (Strings.isNullOrEmpty(colvalue)) {
				// NOTE(review): a missing "index" throws an NPE from Integer.valueOf
				// before the precondition message below is ever reported.
				int index = Integer.valueOf((String) mapobj.get("index"));
				Preconditions.checkArgument(index >= 0,
						String.format("列参数配置错误value和index必须配置一个[%s].", JSON.toJSONString(obj)));
				cols.add(new UnStructuredReader.IndexColumn(index, coltype, format));
			} else {
				cols.add(new UnStructuredReader.ValueColumn(colvalue, coltype, format));
			}
		}
		return cols;
	}

	/** Converts raw column maps into HdfsWriter (type, name) columns. */
	private List<HdfsWriter.Column> buildHdfsWriterCols(List<Object> listobj) {
		List<HdfsWriter.Column> cols = Lists.newArrayList();
		for (Object obj : listobj) {
			if (obj instanceof Map) {
				@SuppressWarnings("rawtypes")
				Map mapobj = (Map) obj;
				cols.add(new HdfsWriter.Column((String) mapobj.get("type"), (String) mapobj.get("name")));
			}
		}
		return cols;
	}

	/**
	 * Builds the shared settings for unstructured (file-based) writers from the
	 * step configuration. Trimmable string options are trimmed before use.
	 *
	 * @throws WorkerException if "path" is missing
	 */
	public UnStructuredWriter buildUnStructuredWriter(Configuration conf) {
		UnStructuredWriter writer = new UnStructuredWriter();
		writer.setPath(conf.getNecessaryValue("path", CommonErrorCode.CONFIG_ERROR));
		writer.setFileName(conf.getString("fileName"));
		writer.setWriteMode(conf.getString("writeMode"));
		if (!Strings.isNullOrEmpty(conf.getString("fieldDelimiter"))) {
			writer.setFieldDelimiter(conf.getString("fieldDelimiter").trim());
		}
		writer.setCompress(conf.getString("compress"));
		writer.setEncoding(conf.getString("encoding"));
		if (!Strings.isNullOrEmpty(conf.getString("nullFormat"))) {
			writer.setNullFormat(conf.getString("nullFormat").trim());
		}
		writer.setDateFormat(conf.getString("dateFormat"));
		if (!Strings.isNullOrEmpty(conf.getString("fileFormat"))) {
			writer.setFileFormat(conf.getString("fileFormat").trim());
		}
		if (!Strings.isNullOrEmpty(conf.getString("header"))) {
			writer.setHeader(Lists.newArrayList(Splitter.on(',').split(conf.getString("header", ""))));
		}
		return writer;
	}

	/**
	 * Fetches all relational ("rdb") and Hive data sources.
	 *
	 * @return the combined list, or null if the lookup failed (null return kept
	 *         for backward compatibility with existing callers)
	 */
	public List<MetaDataSource> findAll() {
		try {
			List<MetaDataSource> dbTypeList = new ArrayList<MetaDataSource>();
			dbTypeList.addAll(metaDataSourceRepository.findListByDsCategory("rdb"));
			dbTypeList.addAll(metaDataSourceRepository.findListByDsCategory("hive"));
			return dbTypeList;
		} catch (Exception e) {
			// FIX: log through SLF4J instead of printStackTrace().
			LOG.error("查询数据源列表失败 .", e);
			return null;
		}
	}

	/**
	 * Fetches all FTP data sources.
	 *
	 * @return the list, or null if the lookup failed (null return kept for
	 *         backward compatibility with existing callers)
	 */
	public List<MetaDataSource> findAllFtp() {
		try {
			List<MetaDataSource> dbTypeList = new ArrayList<MetaDataSource>();
			dbTypeList.addAll(metaDataSourceRepository.findListByDsCategory("ftp"));
			return dbTypeList;
		} catch (Exception e) {
			// FIX: log through SLF4J instead of printStackTrace().
			LOG.error("查询FTP数据源列表失败 .", e);
			return null;
		}
	}
}

