package org.databandtech.job.jobs;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

import org.databandtech.job.entity.ScheduledTaskJob;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Hive script-execution job (e.g. LOAD DATA, DDL). This is not a query job,
 * so it does not need to return a result set.
 */
public class HiveSqlExecuteJob implements ScheduledTaskJob {

	private static final Logger LOGGER = LoggerFactory.getLogger(HiveSqlExecuteJob.class);

	/** Unique job key used for scheduling and log correlation. */
	private String key;
	/** Cron expression controlling when this job fires. */
	private String cron;
	/** Hive JDBC URL, e.g. "jdbc:hive2://192.168.13.200:10000/default". */
	private String conStr;
	/** Statement to execute, e.g. "load data local inpath '/home/data.txt' overwrite into table user_tb". */
	private String sql;

	/**
	 * @param key    unique job key
	 * @param conStr Hive JDBC connection URL
	 * @param sql    statement to execute (no result set expected)
	 * @param cron   cron expression for the schedule
	 */
	public HiveSqlExecuteJob(String key, String conStr, String sql, String cron) {
		super();
		this.conStr = conStr;
		this.sql = sql;
		this.key = key;
		this.cron = cron;
	}

	public String getConStr() {
		return conStr;
	}

	public void setConStr(String conStr) {
		this.conStr = conStr;
	}

	public String getSql() {
		return sql;
	}

	public void setSql(String sql) {
		this.sql = sql;
	}

	public String getKey() {
		return key;
	}

	public void setKey(String key) {
		this.key = key;
	}

	public String getCron() {
		return cron;
	}

	public void setCron(String cron) {
		this.cron = cron;
	}

	/**
	 * Executes {@link #sql} against the Hive server at {@link #conStr}.
	 * Failures are logged (with full stack trace) rather than rethrown, so a
	 * bad run does not kill the scheduler thread.
	 */
	@Override
	public void run() {
		try {
			// Kept for pre-JDBC-4 driver jars; JDBC 4+ would auto-discover the driver.
			Class.forName("org.apache.hive.jdbc.HiveDriver");
		} catch (ClassNotFoundException e) {
			LOGGER.error("HiveSqlExecuteJob => {} Hive JDBC driver not on classpath", key, e);
			return;
		}
		// try-with-resources closes Statement AND Connection even on failure.
		// (The previous version leaked the Statement and swallowed close() errors.)
		try (Connection con = DriverManager.getConnection(conStr, "", "");
				Statement stmt = con.createStatement()) {
			stmt.execute(sql);
			LOGGER.info("HiveSqlExecuteJob => {}  run  当前线程名称 {} ", key, Thread.currentThread().getName());
		} catch (Exception ex) {
			// Log with job context instead of printStackTrace(); preserves the cause.
			LOGGER.error("HiveSqlExecuteJob => {} failed to execute sql: {}", key, sql, ex);
		}
	}

}
