package com.weishao.learn;

import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.logging.KettleLogStore;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransHopMeta;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.steps.tableinput.TableInputMeta;
import org.pentaho.di.trans.steps.tableoutput.TableOutputMeta;
import org.pentaho.di.trans.steps.valuemapper.ValueMapperMeta;

/**
 *  基于Kettle的数据映射转换
 *  
 *  备注：测试使用数据样例：
 *  ==================================
 *  来源表结构
 *  
	CREATE TABLE `student_source` (
	  `id` int(11) NOT NULL COMMENT '主键ID',
	  `student_number` varchar(128) NOT NULL COMMENT '学工号',
	  `student_name` varchar(255) NOT NULL COMMENT '姓名',
	  `sex` int(11) NOT NULL COMMENT '性别：1-boy;2-girl',
	  PRIMARY KEY (`id`)
	) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
	
	INSERT INTO `student_source` VALUES ('1', '1001', '张三', '1');
	INSERT INTO `student_source` VALUES ('2', '1002', '李四', '2');
	INSERT INTO `student_source` VALUES ('3', '1003', '王五', '1');
	INSERT INTO `student_source` VALUES ('4', '1004', '沈六', '2');
 *  ==================================
 *  目的表结构
	CREATE TABLE `student_target` (
	  `bh` int(11) NOT NULL COMMENT '主键ID',
	  `xgh` varchar(128) NOT NULL COMMENT '学工号',
	  `xm` varchar(255) NOT NULL COMMENT '姓名',
	  `sex` varchar(11) NOT NULL COMMENT '性别：boy;girl'
	) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

 * @author tang
 *
 */
public class DataMapperApplication {

	public static void main(String[] args) throws KettleException {
		String sourceQuerySql = "SELECT id, student_number, student_name, sex FROM test_for_tang.student_source ";
		String targetSchemaName = "test_for_tang";
		String targetTableName = "student_target";

		// Field mapping: source column name -> target column name.
		Map<String, String> fieldsMapper = new HashMap<String, String>();
		fieldsMapper.put("id", "bh");
		fieldsMapper.put("student_number", "xgh");
		fieldsMapper.put("student_name", "xm");
		fieldsMapper.put("sex", "sex");

		// Value mapping per field: source value -> target value
		// (e.g. sex: "1" -> "boy", "2" -> "girl", per the table comments above).
		Map<String, Map<String, String>> valuesMapper = new HashMap<>();
		Map<String, String> sex = new HashMap<String, String>();
		sex.put("1", "boy");
		sex.put("2", "girl");
		valuesMapper.put("sex", sex);

		KettleEnvironment.init();

		TransMeta transMeta = new TransMeta();
		transMeta.setName("Transformation"); // name of the transformation

		// FIX: the target connection was also named "source"; with
		// setUsingUniqueConnections(true) below, connections are deduplicated
		// by name, so the two definitions must have distinct names.
		DatabaseMeta sourcedb = new DatabaseMeta("source", "mysql", "Native(JDBC)", "172.17.207.210", "test_for_tang", "3306", "tangyibo", "tangyibo");
		DatabaseMeta targetdb = new DatabaseMeta("target", "mysql", "Native(JDBC)", "172.17.207.210", "test_for_tang", "3306", "tangyibo", "tangyibo");
		sourcedb.addExtraOption(sourcedb.getPluginId(), "characterEncoding", "utf8");
		targetdb.addExtraOption(targetdb.getPluginId(), "characterEncoding", "utf8");
		sourcedb.setQuoteAllFields(true);
		targetdb.setQuoteAllFields(true);

		// Register both connections on the transformation.
		transMeta.addDatabase(sourcedb);
		transMeta.addDatabase(targetdb);

		// Table-input step: reads rows from the source table.
		TableInputMeta t_input = new TableInputMeta();
		t_input.setDatabaseMeta(sourcedb);
		t_input.setSQL(sourceQuerySql); // query against the source database

		StepMeta input = new StepMeta("SourceTableInput", t_input);
		transMeta.addStep(input);

		// Table-output step: writes rows into the target table.
		TableOutputMeta t_output = new TableOutputMeta();
		t_output.setDatabaseMeta(targetdb);
		t_output.setSchemaName(targetSchemaName);
		t_output.setTableName(targetTableName);
		t_output.setTruncateTable(true); // empty the target table before loading
		t_output.setCommitSize(50000);
		t_output.setSpecifyFields(true);

		// Build the column-mapping arrays. Stream fields are the incoming
		// (source) column names; database fields are the columns written in
		// the target table. (The original used inverted local names here.)
		String[] sourceFields = new String[fieldsMapper.size()];
		String[] targetFields = new String[fieldsMapper.size()];
		int i = 0;
		for (Map.Entry<String, String> entry : fieldsMapper.entrySet()) {
			sourceFields[i] = entry.getKey();
			targetFields[i] = entry.getValue();
			i++;
		}

		t_output.setFieldDatabase(targetFields);
		t_output.setFieldStream(sourceFields);
		// NOTE: the duplicate setTruncateTable(true) call was removed; it is
		// already configured above.

		StepMeta output = new StepMeta("targetTableOutput", t_output);
		transMeta.addStep(output);

		// Insert one ValueMapper step per value-mapped field, chained after
		// the input step.
		StepMeta last = input;
		int mapperIndex = 0;
		for (Entry<String, Map<String, String>> entry : valuesMapper.entrySet()) {
			String fieldName = entry.getKey();
			Map<String, String> fieldMapper = entry.getValue();
			if (fieldMapper.isEmpty()) {
				continue; // nothing to map for this field
			}

			String[] sourceValues = new String[fieldMapper.size()];
			String[] targetValues = new String[fieldMapper.size()];
			int j = 0;
			for (Entry<String, String> item : fieldMapper.entrySet()) {
				// FIX: the map keys are the values found in the SOURCE column
				// ("1", "2") and the map values are the desired TARGET values
				// ("boy", "girl"); the original fed them to the ValueMapper
				// swapped, which would map "boy" -> "1" instead of "1" -> "boy".
				sourceValues[j] = item.getKey();
				targetValues[j] = item.getValue();
				j++;
			}

			ValueMapperMeta t_mapper = new ValueMapperMeta();
			t_mapper.setFieldToUse(fieldName);
			//t_mapper.setNonMatchDefault("other");
			t_mapper.setSourceValue(sourceValues);
			t_mapper.setTargetValue(targetValues);

			// FIX: the original named every mapper step with the stale field
			// counter `i`, so multiple value-mapped fields produced duplicate
			// step names (illegal in a transformation). Use a dedicated counter.
			StepMeta mapper_meta = new StepMeta("field_value_mapper" + mapperIndex, t_mapper);
			mapperIndex++;
			transMeta.addStep(mapper_meta);

			transMeta.addTransHop(new TransHopMeta(last, mapper_meta));
			last = mapper_meta;
		}

		// Close the pipeline: last mapper (or the input itself) -> output.
		transMeta.addTransHop(new TransHopMeta(last, output));
		transMeta.setUsingUniqueConnections(true);

		// Execute the transformation and surface any step errors.
		Trans trans = new Trans(transMeta);
		try {
			trans.execute(null);
			trans.waitUntilFinished();
			if (trans.getErrors() > 0) {
				String errMsg = KettleLogStore.getAppender().getBuffer(trans.getLogChannelId(), false).toString()
						.replaceAll("(\r\n|\r|\n|\n\r)", "<br>");
				throw new RuntimeException(errMsg);
			}
		} finally {
			trans.cleanup();
		}
	}

}
