package com.taobao.udp.udf;

import java.net.URI;

import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.hooks.PreJobHook;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.util.Shell.ShellCommandExecutor;

/**
 * Hive pre-job hook that ships a JNI resource archive to every task via the
 * DistributedCache and points the child JVM's {@code java.library.path} at
 * the symlinked archive so native libraries inside it can be loaded.
 */
public class PreJobLoadJniResHook implements PreJobHook {

	/** Archive on HDFS containing the native resources; "#ws" creates the symlink name. */
	private static final String JNI_ARCHIVE_URI = "/group/tsc/commons/ws_1.3.0.9_3.4u7.jar#ws";

	/**
	 * Configures the job before submission.
	 *
	 * @param session   current Hive session (used only for console logging)
	 * @param queryPlan the query plan (unused)
	 * @param job       job configuration to mutate
	 * @param taskId    task id (unused)
	 * @throws Exception if the archive URI is malformed or shell execution fails
	 */
	@Override
	public void run(SessionState session,
			org.apache.hadoop.hive.ql.QueryPlan queryPlan, JobConf job,
			Integer taskId) throws Exception {
		LogHelper console = SessionState.getConsole();
		console.printError("-----------------------start");

		// Symlinks must be enabled for the "#ws" fragment in the URI to take effect.
		DistributedCache.createSymlink(job);
		DistributedCache.addCacheArchive(new URI(JNI_ARCHIVE_URI), job);
		// Point the child JVM at the symlinked archive for JNI loading; the
		// explicit heap cap improves runtime stability.
		job.set("mapred.child.java.opts", "-Djava.library.path=./ws -Xmx2048m");

		console.printError(ShellCommandExecutor.execCommand("pwd"));

		// getArchiveClassPaths() returns null when no archive classpath entries
		// were registered (addCacheArchive does NOT add one) — guard against NPE.
		Path[] archiveClassPaths = DistributedCache.getArchiveClassPaths(job);
		if (archiveClassPaths != null) {
			for (Path p : archiveClassPaths) {
				console.printError(p.toString());
			}
		}

		console.printError("-----------------------end");
		console.printError(job.get("mapred.child.java.opts"));
	}

}
