package com.hna.eking.Spark;

import java.io.IOException;
import java.io.InputStream;

import org.apache.spark.launcher.SparkLauncher;

public class SparkLanch {

	/**
	 * Launches the hard-coded example job ({@code exm.jar} on YARN) and blocks
	 * until it exits. Kept for backward compatibility; delegates to
	 * {@link #lancher(String, String)}.
	 */
	public static void lancher() {
		lancher("exm.jar", "yarn");
	}

	/**
	 * Launches a Spark application via {@link SparkLauncher} and waits for the
	 * spawned child process to terminate.
	 *
	 * @param appResource path to the application jar to submit
	 * @param master      Spark master (e.g. {@code yarn}); also passed through
	 *                    as the single application argument, matching the
	 *                    original hard-coded behavior
	 */
	public static void lancher(String appResource, String master) {
		// NOTE(review): an empty HADOOP_CONF_DIR is almost certainly wrong for a
		// real YARN deployment — confirm and point this at the actual Hadoop
		// configuration directory.
		System.setProperty("HADOOP_CONF_DIR", "");
		try {
			SparkLauncher launcher = new SparkLauncher();
			launcher.setAppResource(appResource).setMaster(master);
			launcher.addAppArgs(master);
			Process process = launcher.launch();
			// Block until the launched spark-submit child process exits.
			process.waitFor();
		} catch (IOException e) {
			e.printStackTrace();
		} catch (InterruptedException e) {
			// Restore the interrupt flag so callers up the stack can observe
			// the interruption instead of it being silently swallowed.
			Thread.currentThread().interrupt();
			e.printStackTrace();
		}
		System.out.println("end....");
	}

	// Jar to submit; bean property, currently not consumed by lancher().
	private String runJarName;
	// Run mode (e.g. "yarn"); bean property, currently not consumed by lancher().
	private String runMode;

	public String getRunJarName() {
		return runJarName;
	}

	public void setRunJarName(String runJarName) {
		this.runJarName = runJarName;
	}

	public String getRunMode() {
		return runMode;
	}

	public void setRunMode(String runMode) {
		this.runMode = runMode;
	}

}
