package avicit.bdp.dds.server.worker.task.table;


import avicit.bdp.common.datasource.BaseDataSource;
import avicit.bdp.common.utils.SpringApplicationContext;
import avicit.bdp.common.utils.database.DBUtils;
import avicit.bdp.dds.dispatch.task.AbstractParameters;
import avicit.bdp.dds.dispatch.task.table.TableParameters;
import avicit.bdp.dds.common.Constants;
import avicit.bdp.dds.server.entity.TaskExecutionContext;
import avicit.bdp.dds.server.worker.task.AbstractTask;
import avicit.bdp.dds.service.process.ProcessService;
import com.alibaba.fastjson2.JSONArray;
import com.alibaba.fastjson2.JSONObject;
import com.alibaba.fastjson2.JSONWriter;
import org.slf4j.Logger;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;


/**
 * table task
 */
/**
 * Table task: previews up to the first {@value #ROW_LIMIT} rows of a configured
 * table and writes each row to the task log as a JSON object.
 */
public class TableTask extends AbstractTask {

	/** Maximum number of rows fetched and logged for the preview. */
	private static final int ROW_LIMIT = 20;

	/**
	 * table parameters (table id / table name), parsed in {@link #init()}
	 */
	private TableParameters tableParameters;

	/**
	 * taskExecutionContext carrying the raw task parameter JSON
	 */
	private final TaskExecutionContext taskExecutionContext;

	/** Service used to resolve the table's backing data source. */
	private final ProcessService processService = SpringApplicationContext.getBean(ProcessService.class);

	/**
	 * constructor
	 *
	 * @param taskExecutionContext taskExecutionContext
	 * @param logger               logger
	 */
	public TableTask(TaskExecutionContext taskExecutionContext, Logger logger) {
		super(taskExecutionContext, logger);
		this.taskExecutionContext = taskExecutionContext;
	}

	@Override
	public void init() {
		logger.info("table task params {}", taskExecutionContext.getTaskParams());
		this.tableParameters = JSONObject.parseObject(taskExecutionContext.getTaskParams(), TableParameters.class);

		// Fail fast on malformed or incomplete parameters.
		// (Fixed copy-pasted message: this is a table task, not an http task.)
		if (tableParameters == null || !tableParameters.checkParameters()) {
			throw new RuntimeException("table task params is not valid");
		}
	}

	@Override
	public void handle() throws Exception {

		Connection conn = null;
		PreparedStatement stmt = null;
		ResultSet rs = null;
		try {
			// Resolve the data source backing the configured table, then run the preview query.
			String dataSourceId = processService.getDataSourceIdByTableId(tableParameters.getTableId());
			BaseDataSource baseDataSource = processService.getDataSource(dataSourceId);
			conn = DBUtils.buildConnection(baseDataSource);
			stmt = conn.prepareStatement(getSql(baseDataSource.type()));
			rs = stmt.executeQuery();
			resultProcess(rs);
		} catch (Exception e) {
			// Best-effort preview: the error is logged but the task is not failed.
			// NOTE(review): success is reported even when the query fails — confirm this is intended.
			logger.error("execute sql error", e);
		} finally {
			DBUtils.closeResource(conn, stmt, rs);
		}
		//设置执行成功状态
		setExitStatusCode(Constants.EXIT_CODE_SUCCESS);
	}

	/**
	 * Builds the preview SQL limited to {@value #ROW_LIMIT} rows using the
	 * row-limiting syntax of the given database type.
	 * <p>
	 * NOTE(review): the table name is concatenated into the statement; it must
	 * come from trusted configuration, never from end-user input.
	 *
	 * @param type database type constant (see {@code avicit.bdp.core.constant.Constants})
	 * @return the preview SQL statement
	 */
	private String getSql(String type) {
		String tableName = tableParameters.getTableName();
		switch (type) {
			case avicit.bdp.core.constant.Constants.DB_TYPE_ORACLE:
				// Oracle's rownum predicate must appear inside a WHERE clause;
				// the previous "select * from T rownum <= 20" was invalid SQL.
				return "select * from " + tableName + " where rownum <= " + ROW_LIMIT;
			case avicit.bdp.core.constant.Constants.DB_TYPE_DB2:
				return "select * from " + tableName + " fetch first " + ROW_LIMIT + " rows only";
			case avicit.bdp.core.constant.Constants.DB_TYPE_SQLSERVER:
				// SQL Server rejects OFFSET/FETCH without an ORDER BY; TOP needs no ordering.
				return "select top " + ROW_LIMIT + " * from " + tableName;
			default:
				// MySQL / PostgreSQL and friends.
				return "select * from " + tableName + " limit " + ROW_LIMIT;
		}
	}


	/**
	 * result process: logs up to {@value #ROW_LIMIT} rows, one JSON object per row.
	 *
	 * @param resultSet resultSet to consume (positioned before the first row)
	 * @throws Exception on JDBC access errors
	 */
	private void resultProcess(ResultSet resultSet) throws Exception {
		ResultSetMetaData md = resultSet.getMetaData();
		int columnCount = md.getColumnCount();

		int rowCount = 0;
		logger.info("节点数据输出， 最多输出20条记录。");
		while (rowCount < ROW_LIMIT && resultSet.next()) {
			// Map column name -> value for the current row.
			JSONObject row = new JSONObject();
			for (int i = 1; i <= columnCount; i++) {
				row.put(md.getColumnName(i), resultSet.getObject(i));
			}
			rowCount++;
			// WriteMapNullValue keeps null columns visible in the log output.
			logger.info("\t" + JSONObject.toJSONString(row, JSONWriter.Feature.WriteMapNullValue));
		}

	}


	@Override
	public AbstractParameters getParameters() {
		return this.tableParameters;
	}
}
