package com.gvtv.main.hadoop;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.lang.reflect.Array;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.List;
import java.util.UUID;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.jar.Manifest;
import java.util.regex.Pattern;

import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.ShutdownHookManager;

public class MapReduceRun {

	/** Pattern that matches any string. */
	public static final Pattern MATCH_ANY = Pattern.compile(".*");

	/**
	 * Priority of the RunJar shutdown hook.
	 */
	public static final int SHUTDOWN_HOOK_PRIORITY = 10;

	/**
	 * Unpack a jar file into a directory.
	 *
	 * This version unpacks all files inside the jar regardless of filename.
	 *
	 * @param jarFile
	 *            the .jar file to unpack
	 * @param toDir
	 *            the destination directory into which to unpack the jar
	 * @throws IOException
	 *             if the jar cannot be read or a file cannot be written
	 */
	public void unJar(File jarFile, File toDir) throws IOException {
		unJar(jarFile, toDir, MATCH_ANY);
	}

	/**
	 * Unpack matching files from a jar. Entries inside the jar that do not
	 * match the given pattern will be skipped.
	 *
	 * @param jarFile
	 *            the .jar file to unpack
	 * @param toDir
	 *            the destination directory into which to unpack the jar
	 * @param unpackRegex
	 *            the pattern to match jar entries against
	 * @throws IOException
	 *             if the jar cannot be read, an entry would resolve outside of
	 *             {@code toDir}, or a file cannot be written
	 */
	public void unJar(File jarFile, File toDir, Pattern unpackRegex) throws IOException {
		JarFile jar = new JarFile(jarFile);
		try {
			// Canonical prefix used to reject "Zip Slip" entries (names such as
			// "../../evil.sh") that would be written outside the target dir.
			String targetPrefix = toDir.getCanonicalPath() + File.separator;
			Enumeration<JarEntry> entries = jar.entries();
			while (entries.hasMoreElements()) {
				final JarEntry entry = entries.nextElement();
				if (entry.isDirectory() || !unpackRegex.matcher(entry.getName()).matches()) {
					continue;
				}
				File file = new File(toDir, entry.getName());
				// SECURITY: refuse entries whose resolved path escapes toDir
				// (directory traversal via crafted entry names, CVE-2018-8009).
				if (!file.getCanonicalPath().startsWith(targetPrefix)) {
					throw new IOException("Jar entry " + entry.getName()
							+ " would be extracted outside of " + toDir);
				}
				InputStream in = jar.getInputStream(entry);
				try {
					ensureDirectory(file.getParentFile());
					OutputStream out = new FileOutputStream(file);
					try {
						IOUtils.copyBytes(in, out, 8192);
					} finally {
						out.close();
					}
				} finally {
					in.close();
				}
			}
		} finally {
			jar.close();
		}
	}

	/**
	 * Ensure the existence of a given directory.
	 *
	 * @param dir
	 *            the directory that must exist after this call
	 * @throws IOException
	 *             if it cannot be created and does not already exist
	 */
	private void ensureDirectory(File dir) throws IOException {
		// mkdirs() returns false both on failure and when the directory already
		// exists, so a failed call is only an error if the directory is absent.
		if (!dir.mkdirs() && !dir.isDirectory()) {
			throw new IOException("Mkdirs failed to create " + dir.toString());
		}
	}

	/**
	 * Run a Hadoop job jar. If the main class is not in the jar's manifest,
	 * then it must be provided on the command line.
	 *
	 * @param args
	 *            {@code jarFile [mainClass] args...}
	 * @throws Throwable
	 *             whatever the job's {@code main} method throws
	 */
	public void run(String[] args) throws Throwable {
		String usage = "RunJar jarFile [mainClass] args...";

		if (args.length < 1) {
			System.err.println(usage);
			System.exit(-1);
		}

		int firstArg = 0;
		String fileName = args[firstArg++];
		File file = new File(fileName);
		if (!file.exists() || !file.isFile()) {
			System.err.println("Not a valid JAR: " + file.getCanonicalPath());
			System.exit(-1);
		}

		// Prefer the Main-Class manifest attribute; fall back to the command line.
		String mainClassName = null;
		JarFile jarFile;
		try {
			jarFile = new JarFile(fileName);
		} catch (IOException io) {
			// BUGFIX: use the (message, cause) constructor instead of the
			// initCause() chain, which returns Throwable and obscured the type.
			throw new IOException("Error opening job jar: " + fileName, io);
		}
		try {
			Manifest manifest = jarFile.getManifest();
			if (manifest != null) {
				mainClassName = manifest.getMainAttributes().getValue("Main-Class");
			}
		} finally {
			// BUGFIX: close the jar even if reading the manifest fails.
			jarFile.close();
		}

		if (mainClassName == null) {
			if (args.length < 2) {
				System.err.println(usage);
				System.exit(-1);
			}
			mainClassName = args[firstArg++];
		}
		// Manifest values may use '/' separators; normalize to a binary name.
		mainClassName = mainClassName.replaceAll("/", ".");

		File tmpDir = new File(new Configuration().get("hadoop.tmp.dir"));
		ensureDirectory(tmpDir);

		// createTempFile reserves a unique name; the file is then deleted and
		// replaced by a directory of the same name for the unpacked contents.
		final File workDir;
		try {
			workDir = File.createTempFile("hadoop-unjar", "", tmpDir);
		} catch (IOException ioe) {
			// If user has insufficient perms to write to tmpDir, default
			// "Permission denied" message doesn't specify a filename.
			System.err.println("Error creating temp dir in hadoop.tmp.dir " + tmpDir + " due to " + ioe.getMessage());
			System.exit(-1);
			return;
		}

		if (!workDir.delete()) {
			System.err.println("Delete failed for " + workDir);
			System.exit(-1);
		}
		ensureDirectory(workDir);

		// Remove the unpacked work directory when the JVM shuts down.
		ShutdownHookManager.get().addShutdownHook(new Runnable() {
			@Override
			public void run() {
				FileUtil.fullyDelete(workDir);
			}
		}, SHUTDOWN_HOOK_PRIORITY);

		unJar(file, workDir);

		// Class path: unpacked root, the jar itself, classes/, and every lib/ jar.
		List<URL> classPath = new ArrayList<URL>();
		classPath.add(new File(workDir + "/").toURI().toURL());
		classPath.add(file.toURI().toURL());
		classPath.add(new File(workDir, "classes/").toURI().toURL());
		File[] libs = new File(workDir, "lib").listFiles();
		if (libs != null) {
			for (int i = 0; i < libs.length; i++) {
				classPath.add(libs[i].toURI().toURL());
			}
		}
		ClassLoader loader = new URLClassLoader(classPath.toArray(new URL[0]));
		Thread.currentThread().setContextClassLoader(loader);

		addLibjarsToPath(args);

		Class<?> mainClass = Class.forName(mainClassName, true, loader);
		// String[].class is equivalent to Array.newInstance(String.class, 0).getClass().
		Method main = mainClass.getMethod("main", new Class[] { String[].class });
		String[] newArgs = Arrays.asList(args).subList(firstArg, args.length).toArray(new String[0]);
		try {
			main.invoke(null, new Object[] { newArgs });
		} catch (InvocationTargetException e) {
			// Surface the job's own exception rather than the reflection wrapper.
			throw e.getTargetException();
		}
	}

	/**
	 * Resolve the jars named by a {@code -libjars} argument, rewrite the
	 * argument value in place as a comma-separated list of canonical
	 * {@code file:} URLs, and add each jar to the system class loader so that
	 * job-client code can load classes from them.
	 *
	 * @param args
	 *            the full command line; the value following "-libjars" is
	 *            rewritten in place
	 * @throws FileNotFoundException
	 *             if a listed jar does not exist
	 * @throws IllegalArgumentException
	 *             if "-libjars" is the last argument and has no value
	 * @throws Exception
	 *             if the system class loader cannot be extended reflectively
	 */
	public void addLibjarsToPath(String[] args) throws Exception {
		if (args == null || args.length == 0) {
			return;
		}
		List<URL> libjarUrls = new ArrayList<URL>();
		for (int i = 0, len = args.length; i < len; i++) {
			if (!"-libjars".equals(args[i])) {
				continue;
			}
			// BUGFIX: the original read args[i+1] without checking that it
			// exists, throwing ArrayIndexOutOfBoundsException for a trailing flag.
			if (i + 1 >= len) {
				throw new IllegalArgumentException("-libjars must be followed by a comma-separated jar list");
			}
			StringBuilder rewritten = new StringBuilder();
			for (String jar : StringUtils.split(args[i + 1], ",")) {
				if (StringUtils.isBlank(jar)) {
					continue;
				}
				// Accept both plain paths and Windows-style "file:/D:/..." URLs.
				File jarFile = new File(StringUtils.removeStartIgnoreCase(jar, "file:/"));
				if (!jarFile.exists()) {
					throw new FileNotFoundException(jarFile.toString());
				}
				URL url = jarFile.toURI().toURL();
				if (rewritten.length() > 0) {
					rewritten.append(",");
				}
				rewritten.append(url.toString());
				libjarUrls.add(url);
			}
			args[i + 1] = rewritten.toString();
			break;
		}
		if (libjarUrls.isEmpty()) {
			return;
		}
		// URLClassLoader.addURL is protected; open it reflectively so the jars
		// also become visible through the system class loader. NOTE(review):
		// this cast assumes the system loader is a URLClassLoader, which holds
		// only on Java 8 and earlier — confirm the target runtime.
		Method addUrl = URLClassLoader.class.getDeclaredMethod("addURL", new Class[] { URL.class });
		boolean accessible = addUrl.isAccessible();
		try {
			if (!accessible) {
				addUrl.setAccessible(true);
			}
			URLClassLoader systemLoader = (URLClassLoader) ClassLoader.getSystemClassLoader();
			for (URL url : libjarUrls) {
				addUrl.invoke(systemLoader, url);
			}
		} finally {
			// Restore the original accessibility flag.
			addUrl.setAccessible(accessible);
		}
	}

	/**
	 * Example driver: runs a demo job jar with hard-coded arguments
	 * (jar path, main class, -libjars and JDBC connection parameters).
	 */
	public static void main(String[] args) {
		List<String> argsList = new ArrayList<String>();
		argsList.add("d:\\logs-mr.jar");
		argsList.add("RequestPage");
		//argsList.add("-conf");
		//argsList.add("E:\\work\\eclipseWork\\bi_data_analysis\\conf\\hadoop\\core-site.xml,E:\\work\\eclipseWork\\bi_data_analysis\\conf\\hadoop\\hdfs-site.xml,E:\\work\\eclipseWork\\bi_data_analysis\\conf\\hadoop\\mapred-site.xml,E:\\work\\eclipseWork\\bi_data_analysis\\conf\\hadoop\\yarn-site.xml");
		argsList.add("-libjars");
		argsList.add("file:/d:/libjars/mysql-connector-java-5.1.7-bin.jar,file:/d:/libjars/gv-ip-utils-1.3.jar");
		argsList.add("-D");
		argsList.add("mapreduce.job.maps=1");
		argsList.add("com.mysql.jdbc.Driver");
		argsList.add("jdbc:mysql://10.0.1.39:3306/hmall_site");
		argsList.add("biread");
		argsList.add("biuserpwd");
		argsList.add("web_visit_log");
		argsList.add("/input/" + UUID.randomUUID());

		MapReduceRun jarRun = new MapReduceRun();
		try {
			jarRun.run(argsList.toArray(new String[] {}));
		} catch (Throwable e) {
			e.printStackTrace();
		}
	}
}
