
package cn.sinobest.pe.engine.excutor;

import java.io.PrintWriter;
import java.io.StringWriter;
import java.sql.SQLException;
import java.sql.Types;
import java.util.Date;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.UUID;

import org.pentaho.di.core.Const;
import org.pentaho.di.core.KettleClientEnvironment;
import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.core.RowMetaAndData;
import org.pentaho.di.core.database.Database;
import org.pentaho.di.core.exception.KettleDatabaseException;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.logging.JobLogTable;
import org.pentaho.di.core.logging.JobLogTable.ID;
import org.pentaho.di.core.logging.KettleLogStore;
import org.pentaho.di.core.logging.LogStatus;
import org.pentaho.di.core.logging.LogTableField;
import org.pentaho.di.core.logging.LoggingObject;
import org.pentaho.di.core.logging.LoggingObjectInterface;
import org.pentaho.di.core.logging.LoggingRegistry;
import org.pentaho.di.core.parameters.UnknownParamException;
import org.pentaho.di.job.Job;
import org.pentaho.di.job.JobExecutionConfiguration;
import org.pentaho.di.job.JobMeta;
import org.pentaho.di.job.entries.special.JobEntrySpecial;
import org.pentaho.di.job.entry.JobEntryCopy;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.repository.StringObjectId;
import org.pentaho.di.repository.kdr.KettleDatabaseRepository;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransConfiguration;
import org.pentaho.di.trans.TransExecutionConfiguration;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.www.CarteSingleton;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import cn.sinobest.knob.core.meta.TaskStatusMeta;
import cn.sinobest.knob.core.rpc.Task;
import cn.sinobest.knob.core.rpc.TaskState;
import cn.sinobest.knob.worker.IExecutor;
import cn.sinobest.knob.worker.watcher.TaskWatcher;
import cn.sinobest.pe.Excutor;
import cn.sinobest.pe.cons.RepositorySingleton;
import cn.sinobest.pe.engine.TaskType;

/**
 * Task executor: starts, watches and aborts Kettle jobs/transformations on
 * this worker node.
 * @author wuxinxue
 * @time 2015-6-16 下午5:36:23
 * @copyright hnisi
 */
public class TaskExecutor implements IExecutor {
	
	private final Logger LOG = LoggerFactory.getLogger(getClass());
	
	/**
	 * Checks whether this worker is at full load and should refuse new tasks.
	 * <p>
	 * The memory-based check was retired (see VCS history); the current criterion
	 * is only the number of concurrently running jobs versus the configured limit
	 * {@code pe.concurr.job.count} (default 5).
	 *
	 * @return {@code true} when the running-job count exceeds the configured limit
	 */
	public synchronized boolean isfullLoad() {
		// Re-read the limit on every call so config changes take effect without a
		// restart; parseInt throws NumberFormatException for a non-numeric value.
		String jobCount = Excutor.peConfig.getValue("pe.concurr.job.count", "5");
		// wxx 20160726: full load when the running-job count exceeds the limit
		return Excutor.taskMap.size() > Integer.parseInt(jobCount);
	}
	
	/**
	 * Marks the Kettle job-log row of the given task as stopped with one error,
	 * so a task that died without finishing does not stay "running" in the log.
	 *
	 * @param statusMeta status of the task to revise; its task id is used as the
	 *                   log CHANNEL_ID (see {@code addTask}/{@code startJob})
	 * @return {@code true} when at least one row was updated, or when
	 *         {@code statusMeta} is {@code null} (nothing to do, treated as success)
	 */
	public boolean reviseTaskLog(TaskStatusMeta statusMeta) {
		if (statusMeta == null) {
			return true;
		}
		// The table name comes from trusted configuration, not user input; SQL
		// identifiers cannot be bound as parameters anyway.
		String tableName = Excutor.peConfig.getValue("Job.Log.Table.Name");
		String updateSql = "UPDATE " + tableName + " SET STATUS='stop',ERRORS=1 WHERE CHANNEL_ID=?";
		int updated = Excutor.dbService.update(updateSql,
				new Object[] { statusMeta.getTaskId() }, new int[] { Types.VARCHAR });
		return updated > 0;
	}
	
	/**
	 * Accepts a new task: loads and starts the corresponding Kettle job and, on
	 * success, registers it in {@link Excutor#taskMap} so it can be watched and
	 * aborted later.
	 *
	 * @param task        task descriptor carrying the repository job id, the job
	 *                    identity and the ganger-generated task id
	 * @param taskWatcher watcher notified of the initial status of this task
	 */
	public void addTask(final Task task, final TaskWatcher taskWatcher) {
		String jobIdentity = task.jobIdentity;
		String jobId = task.jobId;
		String taskId = task.taskId;
		LOG.info("receive new task:{}---{}", jobIdentity, jobId);

		KettleDatabaseRepository repository = null;
		try {
			repository = (KettleDatabaseRepository) RepositorySingleton.getRepository(taskId);
		} catch (Exception e2) {
			// Best-effort cleanup. FIX: guard against null — if getRepository itself
			// threw, repository was never assigned and the original code NPE'd here.
			try {
				if (repository != null && repository.isConnected()) {
					repository.disconnect();
				}
			} catch (Exception e) {
				// FIX: pass the throwable as the last argument without a placeholder
				// so SLF4J prints the full stack trace instead of just toString().
				LOG.error("close repository connection failed", e);
			}
			LOG.error("get repository connection failed", e2);
			return;
		}

		Job job = startJob(jobIdentity, taskId, repository);
		TaskStatusMeta tsm = new TaskStatusMeta(jobId, taskId, jobIdentity, TaskState.RUNNING);
		tsm.setBegintime(new Date());
		if (job == null) {
			// startJob already wrote the failure into the Kettle job log table.
			tsm.setState(TaskState.FAILED);
			LOG.info("start new task failed:{}---{}---{}", jobIdentity, jobId, TaskState.FAILED);
			repository.disconnect();
			taskWatcher.updateStatus(tsm);
		} else {
			taskWatcher.updateStatus(tsm);
			cn.sinobest.pe.cons.Task curr = new cn.sinobest.pe.cons.Task(job, taskWatcher);
			Excutor.taskMap.put(taskId, curr);
		}
	}

	/**
	 * Loads the job identified by {@code jobID} from the repository, disables its
	 * own scheduler, applies execution parameters and starts it asynchronously.
	 *
	 * @param jobID      repository object id of the job (the task's job identity)
	 * @param taskUID    ganger-generated task id, used as the Kettle log channel id
	 *                   and container object id
	 * @param repository connected Kettle database repository to load from
	 * @return the started {@link Job}, or {@code null} when loading failed (the
	 *         failure is then written to the job log table by
	 *         {@link #writeJobLoadError})
	 */
	public synchronized Job startJob(String jobID, String taskUID, KettleDatabaseRepository repository) {
		JobMeta jobMeta = null;
		try {
			jobMeta = repository.loadJob(new StringObjectId(jobID), null, taskUID);
		} catch (Throwable e) {
			// FIX: log with the throwable as the trailing argument (no placeholder)
			// so the stack trace is preserved.
			LOG.error("start job[{}] failed", jobID, e);
			writeJobLoadError(jobID, taskUID, repository, e);
			return null;
		}

		JobExecutionConfiguration jec = new JobExecutionConfiguration();
		jec.setRepository(repository);
		jobMeta.setLogLevel(jec.getLogLevel());
		jobMeta.injectVariables(jec.getVariables());

		// Disable the START entry's own scheduler so the job runs exactly once
		// under our control instead of repeating on its internal timer.
		JobEntryCopy jeco = jobMeta.findJobEntry(JobMeta.STRING_SPECIAL_START, 0, false);
		if (jeco != null) { // 2015-12-21 wxx: guard against jobs without a START entry
			JobEntrySpecial jes = (JobEntrySpecial) jeco.getEntry();
			jes.setRepeat(false);
			jes.setSchedulerType(0);
		}

		final Job job = new Job(repository, jobMeta, null, taskUID);
		job.initializeVariablesFrom(null);
		job.getJobMeta().setInternalKettleVariables(job);
		job.injectVariables(jec.getVariables());
		job.setArguments(jec.getArgumentStrings());

		job.copyParametersFrom(jobMeta);
		job.clearParameters();
		String[] parameterNames = job.listParameters();
		for (int idx = 0; idx < parameterNames.length; idx++) {
			String thisValue = (String) jec.getParams().get(parameterNames[idx]);
			if (Const.isEmpty(thisValue)) {
				continue;
			}
			try {
				// NOTE(review): values are applied to jobMeta (activated below), not
				// to the already-copied job parameters — confirm this is intended.
				jobMeta.setParameterValue(parameterNames[idx], thisValue);
			} catch (UnknownParamException e) {
				LOG.error("set parameter [{}] failed", parameterNames[idx], e);
			}
		}

		jobMeta.activateParameters();
		String startCopyName = jec.getStartCopyName();
		if (startCopyName != null && !startCopyName.isEmpty()) {
			int startCopyNr = jec.getStartCopyNr();
			JobEntryCopy startJobEntryCopy = jobMeta.findJobEntry(startCopyName, startCopyNr, false);
			job.setStartJobEntryCopy(startJobEntryCopy);
		}

		job.setContainerObjectId(taskUID);
		job.setInteractive(true);
		job.setName(jobMeta.getName());
		job.start();
		LOG.info("job[{}---{}] was started with batch id---{}", jobID, jobMeta.getName(), job.getBatchId());
		return job;
	}

	/**
	 * Writes a single ERROR row into the Kettle job log table when a job failed
	 * to load — the normal per-run logging never got a chance to run.
	 */
	private void writeJobLoadError(String jobID, String taskUID, KettleDatabaseRepository repository, Throwable e) {
		Database ldb = repository.getDatabase();
		try {
			// Render the full stack trace into the LOG_FIELD content.
			StringWriter sw = new StringWriter();
			PrintWriter pw = new PrintWriter(sw, true);
			e.printStackTrace(pw);
			pw.flush();
			String logContent = "job loading error: " + sw.toString();

			ldb.connect();
			ldb.setCommit( 10 );
			JobLogTable logTable = JobLogTable.getDefault( null, null );
			String tableName = Excutor.peConfig.getValue("Job.Log.Table.Name");

			// 20160712 wxx: obtain the batch id directly from the sequence
			ldb.getDatabaseMeta().setAttributes(new Properties());
			long id_batch = ldb.getDatabaseMeta().getNextBatchId( ldb, "", tableName, logTable.getKeyField().getFieldName() );
			RowMetaAndData logRecord = createJobErrorLogData(logTable, id_batch, taskUID, jobID, LogStatus.ERROR, logContent);
			ldb.insertRow( "", tableName, logRecord.getRowMeta(), logRecord.getData() );
			if ( !ldb.isAutoCommit() ) {
				ldb.commitLog( true, logTable );
			}
		} catch (KettleDatabaseException e1) {
			LOG.error("write job[{}] log failed", jobID, e1);
		} finally {
			try {
				// FIX: the original condition was inverted — it disconnected only
				// when the connection was ALREADY closed, leaking every open
				// connection. Disconnect when the connection is still open.
				if (ldb.getConnection() != null && !ldb.getConnection().isClosed()) {
					ldb.disconnect();
				}
			} catch (SQLException e1) {
				LOG.error("close connection failed", e1);
			}
		}
	}
	
	/**
	 * Aborts the task identified by {@code taskUID}, treating it as a job task.
	 *
	 * @param taskUID ganger-generated task id
	 */
	public void abortTask(String taskUID) {
		// Delegate to the typed overload; externally triggered aborts are jobs.
		abortTask(taskUID, TaskType.JOB);
	}

	/**
	 * Aborts a running task: stops all entries of the underlying Kettle job and
	 * marks the watched status as {@link TaskState#ABORTED}.
	 *
	 * @param taskUID  ganger-generated task id
	 * @param taskType type of the task (currently only job tasks are registered)
	 */
	public void abortTask(String taskUID, TaskType taskType) {
		LOG.info("receive stop require:{}", taskUID);
		// FIX: look the task up once and guard against unknown ids — the original
		// dereferenced taskMap.get(taskUID) unconditionally and threw an NPE for
		// ids that were never registered (or already removed).
		cn.sinobest.pe.cons.Task task = Excutor.taskMap.get(taskUID);
		if (task == null) {
			LOG.info("no running task found for id:{}", taskUID);
			return;
		}
		Job job = task.getJob();
		if (job == null) {
			return;
		}
		// stopAll cascades the stop request into the job's entries.
		job.stopAll();
		task.getTaskWatcher().getTaskStatusMeta().setState(TaskState.ABORTED);
	}
	
	/**
	 * Loads a transformation from the repository, registers it with Carte and
	 * starts it asynchronously via {@link #executeTrans(Trans)}.
	 *
	 * @param transId        repository object id of the transformation
	 * @param kettleUserName repository user name (currently unused here; the
	 *                       repository comes from {@link RepositorySingleton})
	 * @return the started {@link Trans}, or {@code null} when the repository or
	 *         the transformation could not be loaded
	 */
	public Trans addAndStartTrans(String transId, String kettleUserName) {
		Repository repository = null;
		try {
			repository = RepositorySingleton.getRepository(transId);
		} catch (Exception e1) {
			e1.printStackTrace();
		}
		// FIX: fail fast — the original fell through and threw a
		// NullPointerException on the very next call when loading failed.
		if (repository == null) {
			return null;
		}
		TransMeta transMeta = null;
		try {
			transMeta = repository.loadTransformation(new StringObjectId(transId), null);
		} catch (KettleException e) {
			e.printStackTrace();
		}
		if (transMeta == null) {
			return null;
		}

		TransExecutionConfiguration tec = new TransExecutionConfiguration();
		tec.setRepository(repository);
		transMeta.setLogLevel(tec.getLogLevel());
		transMeta.injectVariables(tec.getVariables());

		// Apply any execution parameters onto the transformation before start.
		Map<String, String> params = tec.getParams();
		for (Entry<String, String> param : params.entrySet()) {
			try {
				transMeta.setParameterValue(param.getKey(), param.getValue());
			} catch (UnknownParamException e) {
				e.printStackTrace();
			}
		}

		Trans trans = new Trans(transMeta);
		trans.setRepository(repository);
		trans.setSocketRepository(CarteSingleton.getInstance().getSocketRepository());

		KettleEnvironment.setExecutionInformation(trans, repository);

		// Register with Carte under a fresh container id so the run is visible
		// on the Carte status pages.
		String carteObjectId = UUID.randomUUID().toString();
		CarteSingleton.getInstance().getTransformationMap().addTransformation(
				transMeta.getName(), carteObjectId, trans, new TransConfiguration(transMeta, tec));
		trans.setContainerObjectId(carteObjectId);

		// Drop any stale buffered log lines for this channel before starting.
		KettleLogStore.discardLines(trans.getLogChannelId(), true);
		try {
			executeTrans(trans);
		} catch (KettleException e) {
			e.printStackTrace();
		}
		return trans;
	}

	/**
	 * Executes the given transformation on a newly spawned thread so the caller
	 * is not blocked while the transformation runs.
	 *
	 * @param trans transformation to execute
	 * @throws KettleException declared for API compatibility; failures on the
	 *                         worker thread are only printed, never rethrown
	 */
	protected void executeTrans(final Trans trans) throws KettleException {
		Runnable work = new Runnable() {
			@Override
			public void run() {
				try {
					trans.execute(null);
				} catch (KettleException e) {
					e.printStackTrace();
				}
			}
		};
		new Thread(work).start();
	}

	/**
	 * Maps a Kettle {@link Trans} runtime state onto a worker {@link TaskState}.
	 *
	 * @param task transformation to inspect
	 * @return ABORTED when stopped; FAILED/FINISHED when it ran to completion
	 *         (depending on the error count); RUNNING otherwise
	 */
	@SuppressWarnings("unused")
	private TaskState getTransstate(Trans task) {
		// A stop request wins regardless of whether the engine still reports running.
		if (task.isStopped()) {
			return TaskState.ABORTED;
		}
		if (task.isRunning() && task.isFinished()) {
			boolean hasErrors = task.getResult() != null && task.getResult().getNrErrors() > 0;
			return hasErrors ? TaskState.FAILED : TaskState.FINISHED;
		}
		// Either still executing, or not yet reported running by the engine.
		return TaskState.RUNNING;
	}

	/**
	 * Stops every task currently in {@link TaskState#RUNNING} state and marks it
	 * as {@link TaskState#ABORTED}; tasks in any other state are left untouched.
	 */
	public void abortAllJobs() {
		for (cn.sinobest.pe.cons.Task task : Excutor.taskMap.values()) {
			TaskStatusMeta meta = task.getTaskWatcher().getTaskStatusMeta();
			// Only jobs still running need an explicit stop.
			if (meta.getState() == TaskState.RUNNING) {
				task.getJob().stopAll();
				meta.setState(TaskState.ABORTED);
			}
		}
	}
	
	/**
	 * Builds a single row for the Kettle job log table describing a job that
	 * failed before it could even start (see {@code startJob}): status as given,
	 * ERRORS=1, all line counters 0, and every timestamp set to "now".
	 *
	 * @param logTable   default Kettle job log table layout (field ids/names/types)
	 * @param batchId    batch id obtained from the log table's id sequence
	 * @param channelId  log channel id; the ganger task id is used for this
	 * @param jobId      repository job id; must be numeric (parsed with Long.parseLong)
	 * @param status     log status to record (ERROR for load failures)
	 * @param logContent rendered stack trace / error text for the LOG_FIELD column
	 * @return row (metadata + data) ready for {@code Database.insertRow}
	 */
	public RowMetaAndData createJobErrorLogData(JobLogTable logTable, long batchId, String channelId, 
			String jobId, LogStatus status, String logContent){
		RowMetaAndData row = new RowMetaAndData();
		Date date = new Date();
      // Walk the configured log-table fields and fill each enabled one, mirroring
      // what Kettle itself writes for a normal job run.
      for ( LogTableField field : logTable.getFields() ) {
        if ( field.isEnabled() ) {
        	Object value = null;
            switch ( ID.valueOf( field.getId() ) ) {
              case ID_JOB:
                value = batchId;
                break;
              case CHANNEL_ID:
                value = channelId;
                break;
              case JOBID:
                // NOTE(review): assumes the repository job id is numeric —
                // Long.parseLong throws NumberFormatException otherwise.
				value = Long.parseLong(jobId);
                break;
              case JOBNAME:
  				value = "";
                  break;
              case STATUS:
                value = status.getStatus();
                break;
              // The job never ran, so every line counter is zero.
              case LINES_READ:
                value = 0L;
                break;
              case LINES_WRITTEN:
                value = 0L;
                break;
              case LINES_INPUT:
                value = 0L;
                break;
              case LINES_OUTPUT:
                value = 0L;
                break;
              case LINES_UPDATED:
                value = 0L;
                break;
              case LINES_REJECTED:
                value = 0L;
                break;
              // Exactly one error: the load failure itself.
              case ERRORS:
                value = 1L;
                break;
              // All timestamp columns get the same "now" — there was no real run.
              case STARTDATE:
                value = date;
                break;
              case LOGDATE:
                value = date;
                break;
              case ENDDATE:
                value = date;
                break;
              case DEPDATE:
                value = date;
                break;
              case REPLAYDATE:
                value = date;
                break;
              case LOG_FIELD:
                value = logContent;
                break;
              // Identify this worker node from configuration.
              case EXECUTING_SERVER:
                value = Excutor.peConfig.getValue("workerId");
                break;
              case EXECUTING_USER:
                value = "admin";
                break;
              case START_JOB_ENTRY:
                value = null;
                break;
              case CLIENT:
                value = KettleClientEnvironment.getInstance().getClient() != null ? KettleClientEnvironment
                                .getInstance().getClient().toString() : "unknown";
                break;
              default:
                break;
            }
          // Append the value with the field's declared type, then fix its length.
          row.addValue( field.getFieldName(), field.getDataType(), value );
          row.getRowMeta().getValueMeta( row.size() - 1 ).setLength( field.getLength() );
        }
      }
      return row;
	}
	
	/**
	 * Shuts this worker instance down.
	 *
	 * @param abortJobs when {@code true}, stop all running jobs, wait ~3s for the
	 *                  stops to propagate and exit with status -1; when
	 *                  {@code false}, poll every ~3s in the background and exit
	 *                  with status 1 once the task map has drained or after at
	 *                  most 21 polls (~63s)
	 */
	public void exitInstance(boolean abortJobs) {
		if (abortJobs) {
			abortAllJobs();
			try {
				// Give stopAll() a moment to propagate before killing the JVM.
				Thread.sleep(3 * 1000);
			} catch (InterruptedException e) {
				// FIX: restore the interrupt flag instead of swallowing it.
				Thread.currentThread().interrupt();
			}
			System.exit(-1);
		} else {
			// Graceful path: wait for tasks to drain, bounded at ~63 seconds.
			// FIX: the original loop flag was never cleared and an interrupted
			// sleep skipped the counter, allowing an unbounded spin; a counted
			// loop makes termination explicit.
			new Thread() {
				@Override
				public void run() {
					for (int count = 0; ; count++) {
						try {
							Thread.sleep(3 * 1000);
						} catch (InterruptedException e) {
							Thread.currentThread().interrupt();
						}
						if (Excutor.taskMap == null || Excutor.taskMap.isEmpty() || count >= 20) {
							System.exit(1);
						}
					}
				}
			}.start();
		}
	}
}