package com.cnebula.dataprocess.task.center.service.impl;

import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Objects;
import java.util.UUID;

import org.springframework.stereotype.Component;

import com.cnebula.dataprocess.common.JobPayload;
import com.cnebula.dataprocess.common.hive.HiveDataPass;
import com.cnebula.dataprocess.common.hive.HiveLoadData;
import com.cnebula.dataprocess.common.hive.HivePurge;
import com.cnebula.dataprocess.common.sqoop.DataSourceNode;
import com.cnebula.dataprocess.task.center.MainTask;
import com.cnebula.dataprocess.task.center.Task;
import com.cnebula.dataprocess.task.center.message.Connection;
import com.cnebula.dataprocess.task.center.message.DatatableMapping;
import com.cnebula.dataprocess.task.center.message.FieldMapping;
import com.cnebula.dataprocess.task.center.message.MainTaskMessage;
import com.cnebula.dataprocess.task.center.message.TableMapping;
import com.cnebula.dataprocess.task.center.service.TaskService;

@Component
public class TaskServiceImpl implements TaskService {

	/**
	 * Translates an incoming {@link MainTaskMessage} into an executable
	 * {@link MainTask}: one {@link Task} per table mapping, each carrying an
	 * ordered job list (sqoop extract, hive load, optional purge, optional
	 * data pass).
	 *
	 * @param obj the message describing the connection and table mappings;
	 *            its datatable mapping is assumed non-null — TODO confirm
	 *            against the message producer
	 * @return a {@link MainTask} whose id mirrors {@code obj.getId()} and
	 *         whose tasks follow the order of the incoming table mappings
	 */
	@Override
	public MainTask createMainTask(MainTaskMessage obj) {
		MainTask mainTask = new MainTask();
		mainTask.setId(obj.getId());

		DatatableMapping datatableMapping = obj.getDatatableMapping();
		List<TableMapping> tableMapping = datatableMapping.getTableMapping();

		// Presize: exactly one Task is produced per TableMapping.
		List<Task> tasks = new ArrayList<Task>(tableMapping.size());
		for (TableMapping item : tableMapping) {
			tasks.add(buildTask(mainTask, obj, item));
		}

		mainTask.setTasks(tasks);
		return mainTask;
	}

	/** Builds one Task (and its ordered job list) for a single table mapping. */
	private Task buildTask(MainTask mainTask, MainTaskMessage obj, TableMapping item) {
		String taskId = item.getDataTableId();

		Task task = new Task();
		task.setTaskId(taskId);
		task.setMainTaskId(mainTask.getId());

		LinkedList<JobPayload> jobs = new LinkedList<JobPayload>();

		// 1. Extract from the source database.
		jobs.add(buildDataSourceNode(obj, item, taskId));

		// Evaluate the optional purge SQL once; the original code repeated
		// this null/empty check in three places.
		String insert = item.getInsert();
		boolean hasInsert = insert != null && !insert.isEmpty();

		// 2. Load into Hive. When a purge step follows, the load targets a
		// staging table ("tmp_" prefix) that the purge SQL consumes.
		jobs.add(buildHiveLoadData(taskId, item.getDataTableName(), hasInsert));

		// 3. Optional purge step driven by the mapping's insert SQL.
		if (hasInsert) {
			jobs.add(buildHivePurge(taskId, insert));
		}

		// 4. Optional data-pass step keyed on the data primary key.
		String dataPrimaryKey = item.getDataPrimaryKey();
		if (dataPrimaryKey != null && !dataPrimaryKey.isEmpty()) {
			jobs.add(buildHiveDataPass(taskId, item.getDataTableName(), dataPrimaryKey));
		}

		task.setJobs(jobs);
		return task;
	}

	/** Configures the sqoop extraction job from the message's connection details. */
	private DataSourceNode buildDataSourceNode(MainTaskMessage obj, TableMapping item, String taskId) {
		// Prefer the user-specified partition column; otherwise derive it by
		// mapping the data primary key to its user-side field name.
		String partition = item.getUserPrimaryKey();
		if (partition == null || partition.isEmpty()) {
			partition = getPartitionColumn(item.getFieldMapping(), item.getDataPrimaryKey());
		}

		Connection conn = obj.getConnection();

		DataSourceNode node = new DataSourceNode();
		node.setJobId(UUID.randomUUID().toString());
		node.setTaskId(taskId);
		node.setHost(conn.getHost());
		node.setPort(conn.getPort());
		node.setCharset(conn.getCharset());
		node.setDbType(obj.getDsType());
		node.setDatabase(conn.getDatabase());
		node.setUsername(conn.getUsername());
		node.setPassword(conn.getPassword());
		node.setSql(item.getSql());
		node.setPartition(partition);
		return node;
	}

	/** Configures the Hive load job; staging tables get a "tmp_" prefix. */
	private HiveLoadData buildHiveLoadData(String taskId, String dataTableName, boolean useStagingTable) {
		HiveLoadData hiveLoadData = new HiveLoadData();
		hiveLoadData.setJobId(UUID.randomUUID().toString());
		hiveLoadData.setTaskId(taskId);
		hiveLoadData.setHiveTable(useStagingTable ? "tmp_" + dataTableName : dataTableName);
		return hiveLoadData;
	}

	/** Configures the Hive purge job that runs the mapping's insert SQL. */
	private HivePurge buildHivePurge(String taskId, String insertSql) {
		HivePurge hivePurge = new HivePurge();
		hivePurge.setJobId(UUID.randomUUID().toString());
		hivePurge.setTaskId(taskId);
		hivePurge.setSql(insertSql);
		return hivePurge;
	}

	/** Configures the Hive data-pass job keyed on the data primary key. */
	private HiveDataPass buildHiveDataPass(String taskId, String dataTableName, String dataPrimaryKey) {
		HiveDataPass hiveDataPass = new HiveDataPass();
		hiveDataPass.setJobId(UUID.randomUUID().toString());
		hiveDataPass.setTaskId(taskId);
		hiveDataPass.setPassColumn(dataPrimaryKey);
		hiveDataPass.setTable(dataTableName);
		return hiveDataPass;
	}

	/**
	 * Resolves the user-side field name mapped to the given data primary key.
	 *
	 * @param fieldMapping  the field mappings to search (assumed non-null —
	 *                      TODO confirm against the message producer)
	 * @param dataPrimaryKey the data-side key to look up; may be null or empty
	 * @return the matching user field name, or {@code null} if none matches
	 */
	private String getPartitionColumn(List<FieldMapping> fieldMapping, String dataPrimaryKey) {
		for (FieldMapping item : fieldMapping) {
			// Objects.equals: the original dataFieldName.equals(...) threw an
			// NPE when a mapping carried a null data field name.
			if (Objects.equals(item.getDataFieldName(), dataPrimaryKey)) {
				return item.getUserFieldName();
			}
		}
		return null;
	}

}
