package com.cnebula.dataprocess.hive.job.serivce.impl;

import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.dao.DataAccessException;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.ResultSetExtractor;
import org.springframework.stereotype.Component;

import com.cnebula.dataprocess.common.JobStatus;
import com.cnebula.dataprocess.common.hive.HiveDataPassJobResult;
import com.cnebula.dataprocess.common.hive.HiveLoadDataJobResult;
import com.cnebula.dataprocess.common.hive.HivePurgeJobResult;
import com.cnebula.dataprocess.hive.job.HiveDataPassSQLBuilder;
import com.cnebula.dataprocess.hive.job.producer.HiveJobProducer;
import com.cnebula.dataprocess.hive.job.serivce.HiveService;

@Component
public class HiveServiceImpl implements HiveService {

	@Autowired
	JdbcTemplate jdbcTemplate;

	@Autowired
	HiveJobProducer producer;

	// Shared worker pool for asynchronous LOAD DATA jobs.
	// NOTE(review): never shut down explicitly and threads are non-daemon, so
	// they live until the JVM is killed — acceptable for a long-lived Spring
	// service, but worth confirming against the deployment model.
	private static final ExecutorService executorService = Executors.newFixedThreadPool(15);

	/**
	 * Asynchronously loads {@code dataFile} into {@code tableName}, replacing the
	 * table's existing contents (HiveQL {@code OVERWRITE}). The outcome is reported
	 * via {@link HiveJobProducer#sendLoadJobMessage}; this method itself never throws.
	 *
	 * <p>WARNING: {@code dataFile} and {@code tableName} are concatenated into the
	 * statement — Hive does not support bind parameters for LOAD DATA — so callers
	 * must supply trusted values only.
	 */
	@Override
	public void loadDataOverwrite(String taskId, String jobId, String dataFile, String tableName) {
		submitLoadJob(taskId, jobId,
				"LOAD DATA INPATH '" + dataFile + "' OVERWRITE INTO TABLE " + tableName);
	}

	/**
	 * Asynchronously appends {@code dataFile} to {@code tableName}. Same reporting
	 * contract and same trusted-input caveat as
	 * {@link #loadDataOverwrite(String, String, String, String)}.
	 */
	@Override
	public void loadData(String taskId, String jobId, String dataFile, String tableName) {
		submitLoadJob(taskId, jobId,
				"LOAD DATA INPATH '" + dataFile + "' INTO TABLE " + tableName);
	}

	/**
	 * Executes the given LOAD statement on the shared pool and publishes a
	 * {@link HiveLoadDataJobResult} when done. Shared by both load variants;
	 * a result message is sent on success and on failure alike.
	 */
	private void submitLoadJob(final String taskId, final String jobId, final String sql) {
		executorService.execute(new Runnable() {

			@Override
			public void run() {
				HiveLoadDataJobResult rst = new HiveLoadDataJobResult();
				rst.setJobId(jobId);
				rst.setTaskId(taskId);
				try {
					jdbcTemplate.execute(sql);
				} catch (Exception e) {
					// Catch Exception, not Throwable: JVM Errors (e.g. OutOfMemoryError)
					// must propagate rather than be reported as an ordinary job failure.
					rst.setStatus(JobStatus.FAIL);
					rst.setMessage(failureMessage(e));
				}
				producer.sendLoadJobMessage(rst);
			}
		});
	}

	/** Never-null failure description: getLocalizedMessage() may be null (e.g. bare NPE). */
	private static String failureMessage(Exception e) {
		String msg = e.getLocalizedMessage();
		return msg != null ? msg : e.toString();
	}

	/**
	 * Synchronously runs a "data pass" over {@code tableName}: reads the table's
	 * column list via {@code DESCRIBE}, builds the pass statement with
	 * {@link HiveDataPassSQLBuilder}, executes it, and reports the outcome via
	 * {@link HiveJobProducer#sendDataPassJobMessage}.
	 *
	 * <p>NOTE(review): a failure of the DESCRIBE query itself propagates to the
	 * caller (no result message is sent) — presumably intentional, since without
	 * the column list no pass SQL can be built; confirm against callers.
	 */
	@Override
	public void dataPass(String taskId, String jobId, String tableName, String passColumn) {

		final String sql = "DESCRIBE " + tableName;
		final String columnName = "col_name";

		// Collect the column names from the DESCRIBE result set.
		List<String> columns = jdbcTemplate.query(sql, new ResultSetExtractor<List<String>>() {
			@Override
			public List<String> extractData(ResultSet rs) throws SQLException, DataAccessException {
				List<String> columns = new ArrayList<String>();
				while (rs.next()) {
					columns.add(rs.getString(columnName));
				}
				return columns;
			}
		});

		HiveDataPassSQLBuilder sqlBuilder = new HiveDataPassSQLBuilder(tableName, passColumn, columns);
		String datapassSQL = sqlBuilder.getResult();

		HiveDataPassJobResult rst = new HiveDataPassJobResult();
		rst.setJobId(jobId);
		rst.setTaskId(taskId);
		try {
			jdbcTemplate.execute(datapassSQL);
		} catch (Exception e) {
			// Exception, not Throwable — see submitLoadJob.
			rst.setStatus(JobStatus.FAIL);
			rst.setMessage(failureMessage(e));
		}
		producer.sendDataPassJobMessage(rst);
	}

	/**
	 * Synchronously executes the caller-supplied purge statement and reports the
	 * outcome via {@link HiveJobProducer#sendPurgeJobMessage}. The SQL is executed
	 * verbatim — callers must supply trusted statements only.
	 */
	@Override
	public void purge(String taskId, String jobId, String sql) {

		HivePurgeJobResult rst = new HivePurgeJobResult();
		rst.setJobId(jobId);
		rst.setTaskId(taskId);
		try {
			jdbcTemplate.execute(sql);
		} catch (Exception e) {
			// Exception, not Throwable — see submitLoadJob.
			rst.setStatus(JobStatus.FAIL);
			rst.setMessage(failureMessage(e));
		}
		producer.sendPurgeJobMessage(rst);
	}

}
