package com.gvtv.main.execute;

import java.io.File;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.Date;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.joda.time.DateTime;

import com.gvtv.main.exception.ExecuteException;
import com.gvtv.main.execute.ExecuteJob.ExecuteJobState;
import com.gvtv.main.hive.HiveJdbc;
import com.gvtv.main.schedul.SchedulEnv;
import com.gvtv.main.schedul.SchedulInfo;
import com.gvtv.main.util.AppTools;
import com.gvtv.main.util.Constant;

/**
 * Job worker that runs a Hive query and streams the result set into a
 * delimited text file.
 *
 * <p>Two threads cooperate: the caller's thread (the producer) iterates the
 * JDBC {@link ResultSet} inside {@link #work()} and formats each row, while a
 * single-threaded executor runs a {@link TxtWriter} (the consumer) that drains
 * the shared bounded {@link BlockingQueue} into the output file. The
 * {@code isDone} flag signals completion or failure in either direction.
 *
 * <p>Job parameters read: {@code separator} (field delimiter, default ","),
 * {@code line} (row delimiter, default platform line separator),
 * {@code query} (the Hive SQL), {@code outpath} (output file path) and
 * {@code delete} ("true" to remove a pre-existing output file first).
 */
public class Hive2TxtJobWorker extends AbstractJobWorker implements JobWorker {

	private Logger logger = Logger.getLogger(Hive2TxtJobWorker.class);
	private HiveJdbc hiveJdbc;
	private ExecutorService executorService;
	// Set to true once the producer has emitted every row, or by whichever
	// side fails first; both loops poll it so neither blocks forever.
	private volatile boolean isDone = false;
	// Bounded hand-off queue of formatted rows; the bound (2000) lets a slow
	// writer back-pressure the query loop instead of exhausting memory.
	private BlockingQueue<String> queue;

	public Hive2TxtJobWorker(SchedulInfo schedulInfo, ExecuteJob executeJob, SchedulEnv env) {
		super(schedulInfo, executeJob, env);
		this.hiveJdbc = new HiveJdbc();
		executorService = Executors.newSingleThreadExecutor();
		queue = new LinkedBlockingQueue<String>(2000);
	}

	/**
	 * Runs the configured Hive query and hands each row to the writer thread.
	 * Sets the job state to SUCCESS only when the writer reports that the file
	 * was written completely; any failure on either side yields FAILED.
	 */
	@Override
	public void work() {
		job.setStartTime(new Date());
		logger.info("job ["+job.getId()+"]开始运行，运行时间："+new DateTime(job.getSubmitTime().getTime()).toString(Constant.DATETIME_FORMAT));
		env.convert(job);
		String txtSeparator = job.getParamValue("separator");
		String line = job.getParamValue("line");
		if (txtSeparator == null) {
			txtSeparator = ",";
		}
		logger.info("job ["+job.getId()+"]字段分隔符:" + txtSeparator);
		if (StringUtils.isBlank(line)) {
			line = System.getProperty("line.separator");
		}

		logger.info("job ["+job.getId()+"]行分隔符:" + line);
		String query = AppTools.replaceBlank(job.getParamValue("query"));
		if(StringUtils.isBlank(query)){
			logger.error("job ["+job.getId()+"]查询语句为空.");
			job.setJobState(ExecuteJobState.FAILED);
			return ;
		}
		ResultSet rs = null;
		try {
			logger.info("job ["+job.getId()+"]查询的sql: " + query);
			// Apply any "set key=value" Hive session options before the query.
			List<String> sets = job.getHiveSets();
			if(sets!=null && !sets.isEmpty()){
				for (String set : sets) {
					if(StringUtils.isBlank(set)){
						logger.warn("job ["+job.getId()+"] 的hive set 语句为空，跳过。。。");
						continue;
					}
					hiveJdbc.execute(AppTools.replaceBlank(set));
				}
			}
			List<String> args = job.getArgs();
			if(args!=null && args.size()>0){
				rs = hiveJdbc.query(query, args.toArray(new String[0]));
			}else {
				rs = hiveJdbc.query(query);
			}
			Future<Boolean> future = executorService.submit(new TxtWriter());
			ResultSetMetaData rsmd = rs.getMetaData();
			int columnCount = rsmd.getColumnCount();
			StringBuilder sb = new StringBuilder();
			long cnt = 0;
			while (rs.next()) {
				if (isDone()) {
					// Writer already failed (it sets the flag) — stop producing.
					break;
				}
				for (int i = 1; i <= columnCount; i++) {
					if (i != 1) {
						sb.append(txtSeparator);
					}
					sb.append(rs.getObject(i));
				}
				sb.append(line);
				String row = sb.toString();
				sb.setLength(0);
				// Timed offer instead of put(): a blocking put() deadlocks when
				// the writer dies early and the bounded queue is full. Re-check
				// isDone() between attempts so a dead writer releases us.
				boolean offered = false;
				while (!offered && !isDone()) {
					offered = queue.offer(row, 1, TimeUnit.SECONDS);
				}
				if (!offered) {
					break;
				}
				cnt++;
			}
			logger.info("job ["+job.getId()+"]查询了"+cnt+"条数据.");
			// Tell the writer there is nothing more to come, then wait for it.
			setDone(true);
			if (future.get().booleanValue()) {
				logger.info("job ["+job.getId()+"]写文件成功。");
				job.setJobState(ExecuteJobState.SUCCESS);
			} else {
				logger.info("job ["+job.getId()+"]写文件失败。");
				job.setJobState(ExecuteJobState.FAILED);
			}

		} catch (ExecuteException e) {
			logger.error("job ["+job.getId()+"]查询数据出错." + e.getMessage(), e);
			setDone(true);
			job.setJobState(ExecuteJobState.FAILED);
		} catch (SQLException e) {
			logger.error("job ["+job.getId()+"]获取数据出错." + e.getMessage(), e);
			setDone(true);
			job.setJobState(ExecuteJobState.FAILED);
		} catch (InterruptedException e) {
			// Restore the interrupt status so callers up the stack can see it.
			Thread.currentThread().interrupt();
			logger.error("job ["+job.getId()+"]执行被中断.", e);
			setDone(true);
			job.setJobState(ExecuteJobState.FAILED);
		} catch (ExecutionException e) {
			logger.error("job ["+job.getId()+"]写文件线程出错.", e);
			setDone(true);
			job.setJobState(ExecuteJobState.FAILED);
		} finally {
			// The ResultSet was previously leaked; always release it.
			if (rs != null) {
				try {
					rs.close();
				} catch (SQLException e) {
					logger.warn("job ["+job.getId()+"]关闭ResultSet出错.", e);
				}
			}
		}
		job.setEndTime(new Date());
		logger.info("job ["+job.getId()+"]结束运行，结束时间："+new DateTime(job.getEndTime().getTime()).toString(Constant.DATETIME_FORMAT)+"，运行状态，"+job.getJobState());
	}

	/** Releases the Hive connection and stops the writer executor. */
	@Override
	protected void clear() {
		hiveJdbc.close();
		executorService.shutdown();
	}

	public boolean isDone() {
		return isDone;
	}

	public void setDone(boolean isDone) {
		this.isDone = isDone;
	}

	public BlockingQueue<String> getQueue() {
		return queue;
	}

	public void setQueue(BlockingQueue<String> queue) {
		this.queue = queue;
	}

	/**
	 * Consumer task: appends queued rows to the output file until the producer
	 * signals completion and the queue is drained. Returns {@code true} on a
	 * clean write, {@code false} if writing failed.
	 */
	public class TxtWriter implements Callable<Boolean> {

		@Override
		public Boolean call() throws Exception {
			logger.info("job ["+job.getId()+"]写文件线程开始运行.");
			String txtPath = job.getParamValue("outpath");
			String deleteTxtFile = job.getParamValue("delete");
			File txtFile = new File(txtPath);

			Writer fw;
			try {
				// Optionally remove a stale output file before appending.
				if ("true".equalsIgnoreCase(deleteTxtFile) && txtFile.exists() && txtFile.isFile()) {
					txtFile.delete();
				}
				if (!txtFile.exists() && !txtFile.createNewFile()) {
					throw new ExecuteException("创建文件" + txtFile.getAbsolutePath() + "失败.");
				}
				if (!txtFile.isFile()) {
					throw new ExecuteException(txtPath + "不是一个文件.");
				}
				// Explicit UTF-8: FileWriter used the platform-default charset,
				// which corrupts non-ASCII data on mis-configured hosts.
				fw = new OutputStreamWriter(new FileOutputStream(txtFile, true), "UTF-8");
			} catch (Exception e) {
				// Signal the producer before propagating, otherwise it could
				// block forever trying to enqueue rows nobody will consume.
				setDone(true);
				throw e;
			}
			boolean rel = true;
			long cnt = 0;
			try {
				while (!Thread.currentThread().isInterrupted()) {
					// Timed poll so we periodically re-check the done flag.
					String data = queue.poll(5, TimeUnit.SECONDS);
					if (data != null) {
						fw.append(data);
						cnt++;
					} else if (isDone() && queue.isEmpty()) {
						break;
					}
				}
			} catch (InterruptedException e) {
				logger.error("job ["+job.getId()+"]获取队列里的数据出现异常. " + e.getMessage(), e);
				Thread.currentThread().interrupt();
				setDone(true);
				rel = false;
			} catch (IOException e) {
				logger.error("job ["+job.getId()+"]写文件[" + txtPath + "]出错. " + e.getMessage(), e);
				setDone(true);
				rel = false;
			} finally {
				try {
					fw.close();
				} catch (IOException e) {
					logger.warn("job ["+job.getId()+"]关闭文件出错.", e);
				}
			}
			logger.info("job ["+job.getId()+"]写文件线程结束运行，写入了" + cnt + "条数据。");
			return rel;
		}
	}
}
