package edu.ruc.cluster.action;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.zip.GZIPInputStream;

import org.apache.tools.tar.TarEntry;
import org.apache.tools.tar.TarInputStream;

import edu.ruc.cluster.hadoop.HadoopConfig;
import edu.ruc.cluster.hadoop.HadoopConst;
import edu.ruc.cluster.host.HostsAddCmd;
import edu.ruc.cluster.host.HostsFileGenerator;
import edu.ruc.cluster.util.Const;
import edu.ruc.cluster.util.DefaultLogHandler;
import edu.ruc.cluster.util.FileTransfer;
import edu.ruc.cluster.util.PropertyRetrieval;
import edu.ruc.cluster.util.TarGzGenerator;

/**
 * This class drives the following cluster configuration steps:
 * 		1. Add host information to the current machine
 * 		2. sftp ip.list as /home/someuser/addfile to each machine (master and
 * 				slaves), where someuser is in the sudoer list
 * 		3. Append the host information to /etc/hosts on each machine
 * 		4. Add the hadoop login user to each machine
 * @author deke
 * modified: add dealjdk() on 2012-12-11
 * modified: add dealhadoop() on 2012-12-28
 */
public class Entrance {

	private DefaultLogHandler logHandler = null;
	private List<String> hosts = null;
	private List<String> username = null;
	private List<String> passwd = null;
	
	private String localUser = null;
	private String localPasswd = null;
	
	private String addUser = null;
	private String addPasswd = null;
	
	private static int MACHINE_NUMBER = 0;
	
	PropertyRetrieval property = null;
	
	public void init(){
//		logHandler = new DefaultLogHandler();
		logHandler = DefaultLogHandler.getInstance();
		hosts = new ArrayList<String>();
		username = new ArrayList<String>();
		passwd = new ArrayList<String>();
		property = new PropertyRetrieval(Const.CONFIG_FILE);
	}
	private void readin(){		
		try{
			BufferedReader reader = new BufferedReader(new FileReader(new File(Const.USER_PW_FILE)));
			String line = "";
			while((line = reader.readLine()) != null){
				String[] strs = line.split(Const.LINE_SEPERATOR);
				
				if(line.contains(Const.LOCALHOST_TAG)){
					localUser = strs[1];
					localPasswd = strs[2];
				}else if(line.contains(Const.HADOOP_USER_TAG)){
					addUser = strs[1];
					addPasswd = strs[2];
				}else{					
					hosts.add(strs[0]);
					username.add(strs[1]);
					passwd.add(strs[2]);
				}
				
			}
			reader.close();
			MACHINE_NUMBER = hosts.size();
			
		}catch(FileNotFoundException e){
			logHandler.error(this.getClass().getSimpleName() + e.getMessage(), e);
		}catch(IOException e){
			logHandler.error(this.getClass().getSimpleName() + e.getMessage(), e);
		}
	}
	private void sftp(){
				
		for(int i = 0 ; i < MACHINE_NUMBER; i ++){
			logHandler.info("transform ip.list to " + username.get(i) + "@" + hosts.get(i));
			HostsFileGenerator gen = new HostsFileGenerator();
			gen.transform(hosts.get(i), username.get(i), passwd.get(i));
		}
	}
	private void appendHostFile(){
		for(int i = 0 ; i < MACHINE_NUMBER; i ++){
			String host = username.get(i) + "@" + hosts.get(i);
			logHandler.info("adding host information to " + host);
			HostsAddCmd add = new HostsAddCmd(host, passwd.get(i));
			add.addhostsfile();
		}
	}
	private void addHadoopUser(){
		for(int i = 0 ; i < MACHINE_NUMBER; i ++){
			String host = username.get(i) + "@" + hosts.get(i);
			logHandler.info("add user " + addUser + " to " + host);
			UserAddCmd useradd = new UserAddCmd(host, passwd.get(i));
			useradd.adduser(addUser, addPasswd);
		}
	}
	private void sshLogin(){
		String masterhost = addUser + "@" + hosts.get(0);
		
		NoPasswdSSHCmd masterssh = new NoPasswdSSHCmd(masterhost, addPasswd);
		masterssh.masterssh(addUser+"@"+hosts.get(0), addPasswd);
		
			// the master itself also need ssh 
		for(int i = 0 ; i < MACHINE_NUMBER ; i ++){
			String host = addUser + "@" + hosts.get(i);
			logHandler.info("configuring ssh no password login in from " + masterhost 
					+ " to " + host );
			NoPasswdSSHCmd sshcmd = new NoPasswdSSHCmd(masterhost, addPasswd);
			sshcmd.ssh(host, addPasswd);
		}
	}
	/**
	 * Used: add ip.list to localhost /etc/hosts file
	 */
	private void dealLocal(){
		HostsFileGenerator hf = new HostsFileGenerator();
		String host = "localhost";
		hf.transform(host, localUser, localPasswd);
		
		HostsAddCmd add = new HostsAddCmd(localUser + "@" + host, localPasswd);
		add.addhostsfile();
	}
	/**
	 * This method is not tested in Entrance, but tested in ChangeModCmd
	 */
	private void changemod(){
		for(int i = 1 ; i < MACHINE_NUMBER ; i ++){
			String host = addUser + "@" + hosts.get(i);
			ChangeModCmd change = new ChangeModCmd(host, addPasswd);
			change.chmod();
		}
	}
	
	/**
	 * Transfer src to sudoer home folder of each machine
	 * called by dealjdk()
	 * @param src
	 */
//	private void jdksftp(String src){
//		// transfer each Const.JDK_TAR_FILE in home folder 
//		for(int i = 0 ; i < MACHINE_NUMBER ; i ++){
//			FileTransfer ft = new FileTransfer(hosts.get(i), username.get(i), passwd.get(i));
//			ft.transform(src, "/home/" + username.get(i) );
//		}
//	}
	
	private void binsftp(String src){
		for(int i = 0 ; i < MACHINE_NUMBER ; i ++){
			FileTransfer ft = new FileTransfer(hosts.get(i), username.get(i), passwd.get(i));
			ft.transform(src, "/home/" + username.get(i));
		}
	}
	/**
	 * Transfer src to sudoer home folder of each machine
	 * @param src
	 */
	private void hadoopsftp(String src){
		
			// transfer each Const.HADOOP_TAR_FILE under Const.HADOOP_DST_TAG
		for(int i = 0 ; i < MACHINE_NUMBER ; i ++){
			FileTransfer ft = new FileTransfer(hosts.get(i), username.get(i), passwd.get(i));
			ft.transform(src, "/home/" + username.get(i));
		}
	}
	/**
	 * TODO:2012-12-28
	 * Used: deal hadoop using message stored in config file. With following scenarios:
	 * 		1. folder: rewrite masters, slaves using ip.list
	 * 						   core-site.xml, mapred-site.xml, hdfs-site.xml using HadoopConfig
	 * 		2. .tar.gz:tar -zxvf tar file, do as 1
	 * 		Then tar, transform to corresponding path( as input)
	 * finished: 2012-12-29
	 */
	private void dealhadoop(){
		String src = null;
		String dst = null;
		String folderName = null;
		
		try{
			/*
			BufferedReader reader = new BufferedReader(new FileReader(new File(Const.HADOOP_CONF_FILE)));
			String line;
			while((line = reader.readLine()) != null){
				String[] strs = line.split(Const.LINE_SEPERATOR);
				if(strs[0].equals(Const.HADOOP_SRC_TAG))
					src = strs[1];
				else if(strs[0].equals(Const.HADOOP_DST_TAG))
					dst = strs[1];
			}
			*/
			src = property.getString(Const.HADOOP_SRC_TAG);
			dst = property.getString(Const.HADOOP_DST_TAG);
			File sourceFile = new File(src);
			if(sourceFile.exists() == false || src == null || src.equals("") || dst == null || dst.equals("")){
				logHandler.error("invalid parameter from config file!", new NullPointerException());
				return;
			}
			logHandler.info("hadoop src: " + src);
			if(sourceFile.isDirectory()){				
				folderName = sourceFile.getName();
				Runtime.getRuntime().exec("cp -r " + src + " " + Const.HADOOP_SRC_FOLDER);
			}else{
				folderName = getTarGzFolder(sourceFile);
				untar(src, Const.HADOOP_SRC_FOLDER);
			}
			
			property.putString(Const.HADOOP_FOLDER_TAG, folderName);
			String hadoopsrc = Const.HADOOP_SRC_FOLDER + "/" + folderName;
			System.out.println("hadoop src : " + hadoopsrc);
				// modify corresponding files in conf/ for .tar.gz
			if(sourceFile.isFile())
				modifyHadoopFile(hadoopsrc);
			
				// generate .tar.gz for hadoop
			String hadooptar = Const.HADOOP_SRC_FOLDER + "/" + Const.HADOOP_TAR_FILE;
			tar(hadoopsrc, hadooptar);
				// transfer hadoop tar.gz file
			hadoopsftp(hadooptar);
				
			// login as sudoer, run tar, chown command
			for(int i = 0 ; i < MACHINE_NUMBER; i ++){
				String host = username.get(i) + "@" + hosts.get(i);
				ConfigHadoopCmd config = new ConfigHadoopCmd(host, passwd.get(i));
				config.confhadoop();
			}
			String bindir = "/home/" + addUser + "/" + property.getString(Const.HADOOP_DST_TAG)
					+ "/" + property.getString(Const.HADOOP_FOLDER_TAG) + "/bin/";
			bindir = bindir.replaceAll("//", "/");
			for(int i = 0 ; i < MACHINE_NUMBER ; i ++){
				String host = addUser + "@" + hosts.get(i);
				ChangeHadoopModCmd chmod = new ChangeHadoopModCmd(host, addPasswd);
				chmod.chmod(bindir);
			}
			
		}catch(FileNotFoundException e){
			logHandler.error(this.getClass().getSimpleName() + e.getMessage(), e);
		}catch(IOException e){
			logHandler.error(this.getClass().getSimpleName() + e.getMessage(), e);
		}
	}
	/**
	 * Used: deal jdk with different kind of scenarios:
	 * 		1. folder: compressed it as Const.JDK_TAR_FILE
	 * 		2. ---.bin, install it, get the corresponding jdk in /usr/java, compressed it as Const.JDK_TAR_FILE
	 * 		   ---.tar.gz, copy it as Const.JDK_TAR_FILE
	 * 		Then transfer Const.JDK_TAR_FILE to cluster /home/sudoer/
	 * 		Store the folder name when decompression Const.JDK_TAR_FILE 
	 */
	private void dealjdk(){
		
		String src = null;
		String dst = null;
		
		String folderName = null;
		// TODO:
		try{
			/*
			BufferedReader reader = new BufferedReader(new FileReader(new File(Const.JDK_CONF_FILE)));
			String line;
			while((line = reader.readLine()) != null){
				String[] strs = line.split(Const.LINE_SEPERATOR);
				if(strs[0].equals(Const.JDK_SRC_TAG)){
					src = strs[1];
				}else if(strs[0].equals(Const.JDK_DST_TAG))
					dst = strs[1];
			}
			*/
			src = property.getString(Const.JDK_SRC_TAG);
			dst = property.getString(Const.JDK_DST_TAG);
			File sourceFile = new File(src);
			if(sourceFile.exists() == false || src == null || src.equals("") || dst == null || dst.equals("")){
				logHandler.error("invalid parameter from config file!", new NullPointerException());
				return;
			}
			logHandler.info("src: " + src);
			if(sourceFile.isDirectory()){
				//TODO: transform this directory to corresponding directory
				
				// String tarFilePath = sourceFile.getAbsolutePath().toString();
				// tarFilePath = tarFilePath.substring(0, tarFilePath.lastIndexOf('/') + 1) + Const.JDK_TAR_FILE;
				// logHandler.info("Going to create tar.gz file:\n  " + tarFilePath);
					
					//Const.JDK_TAR_FILE is in workspace directory(may cause problem?)
//				folderName = sourceFile.getName();
				tar(src, Const.JDK_TAR_FILE);
				folderName = getTarGzFolder(new File(Const.JDK_TAR_FILE));
				
			}else{
				//TODO: deal with different kind of files
				if(src.endsWith(".bin")){
					// call ExecJdkBinCmd
//					ExecJdkBinCmd bin = new ExecJdkBinCmd(localUser+"@localhost", localPasswd);
//					folderName = bin.install(src);
//					tar("/usr/java/" + folderName, Const.JDK_TAR_FILE);
					binsftp(src);
					for(int i = 0 ; i < MACHINE_NUMBER ; i ++){
						ExecJdkBinCmd bin = new ExecJdkBinCmd(username.get(i) + "@" + hosts.get(i), passwd.get(i));
						folderName = bin.install("/home/" + username.get(i) + "/" + sourceFile.getName());
					}
					
								
				}else if(src.endsWith(".tar.gz")){
//					logHandler.error("src: " + src);
//					Runtime.getRuntime().exec("cp " + src + " " + Const.JDK_TAR_FILE);
					Runtime.getRuntime().exec("cp " + src + " .");
//					File jdk = new File(Const.JDK_TAR_FILE);
					folderName = getTarGzFolder(new File(src));	
					File jdk = new File(src);
					Runtime.getRuntime().exec("mv " + jdk.getName() + " " + Const.JDK_TAR_FILE);
					logHandler.info("tar.gz folder: " + folderName);
				}
			}
			
			property.putString(Const.JDK_FOLDER_TAG, folderName);
//			jdksftp(Const.JDK_TAR_FILE);
//			for(int i = 0 ; i < MACHINE_NUMBER; i ++){
//				String host = username.get(i) + "@" + hosts.get(i);
//				LocateJDKCmd locate = new LocateJDKCmd(host, passwd.get(i));
//				locate.confjdk();
//			}
			
		}catch(FileNotFoundException e){
			logHandler.error(this.getClass().getSimpleName() + e.getMessage(), e);
		}catch(IOException e){
			logHandler.error(this.getClass().getSimpleName() + e.getMessage(), e);
		}
	}
	
	/**
	 * Used: change the hadoop conf/ content, called by dealhadoop();
	 * @param folderName
	 */
	private void modifyHadoopFile(String folderName){
		File hadoopfile = new File(folderName);
		if(hadoopfile.exists() == false){
			logHandler.error("File " + folderName + " doesn't exist!");
			return;
		}
	
			//masters and slaves:
			//		masters: first one from hosts
			//		slaves:  the rest from hosts
		
		try{
			File slaves = new File(folderName + "conf/slaves");
			File masters = new File(folderName + "conf/masters");
			if(slaves.exists() == false || masters.exists() == false){
				logHandler.error("no masters/slaves file under " + folderName + "/conf/");
				return;
			}
			boolean both = Boolean.parseBoolean(property.getString(Const.MASTER_IS_SLAVE_TAG));
			BufferedWriter masterwriter = new BufferedWriter(new FileWriter(masters));
			BufferedWriter slavewriter = new BufferedWriter(new FileWriter(slaves));
			Iterator<String> iter = hosts.iterator();
			if(iter.hasNext()){
				String str = iter.next();
				masterwriter.write(str + "\n");
				if(both == true)
					slavewriter.write(str + "\n");
			}				
			while(iter.hasNext()){
				slavewriter.write(iter.next() + "\n");
			}
			masterwriter.close();
			slavewriter.close();
			
		}catch(IOException e){
			logHandler.error("masters/slaves Writer Exception " + this.getClass().getSimpleName() + " " + e.getMessage(), e);
		}
			
			//hadoop-env.sh:
			//		export JAVA_HOME=/usr/java/jdk1.6.0_29
			//		export HADOOP_HOME=/home/deke/yt/hadoop/hadoop-0.20.2
			//		export HADOOP_LOG_DIR=/home/deke/yt/hadoop/logs
		String str1 = "";
		try{
			File envfile = new File(folderName + "conf/hadoop-env.sh");
			if(envfile.exists() == false){
				logHandler.error("no hadoop-env.sh file under " + folderName + "/conf/");
				return;
			}
			str1 = property.getString(Const.HADOOP_DST_TAG);
			if(str1.charAt(str1.length() - 1) == '/' )
				str1 = str1.substring(0, str1.length() - 1);
			
			String str2 = property.getString(Const.JDK_DST_TAG);
			if(str2.charAt(str2.length() - 1) == '/')
				str2 = str2.substring(0, str2.length() - 1);
			
			String hadoopdir = "/home/" + property.getString(Const.HADOOP_USER_TAG)+ "/";
			String javahome = str2 + "/" + property.getString(Const.JDK_FOLDER_TAG);
			String hadoophome = hadoopdir + str1 + "/" + property.getString(Const.HADOOP_FOLDER_TAG);
			String hadooplog = hadoopdir + str1 + "/logs";
			
			RandomAccessFile raf = new RandomAccessFile(envfile, "rw");
			long length = raf.length();
			raf.seek(length);
			raf.write(("export JAVA_HOME=" + javahome + "\n").getBytes());
			raf.write(("export HADOOP_HOME=" + hadoophome + "\n").getBytes());
			raf.write(("export HADOOP_LOG_DIR=" + hadooplog + "\n").getBytes());
			raf.close();
			// TODO: open hadoop-env.sh and append the export to it
		}catch(IOException e){
			logHandler.error("RandomAccessFile Exception " + this.getClass().getSimpleName() + " " + e.getMessage(), e);
		}
		
			//core-site.xml, mapred-site.xml, hdfs-site.xml
			//		add node to each xml file with HadoopConfig
		String hadoopPath = "/home/" + property.getString(Const.HADOOP_USER_TAG) + "/" + str1;
		Map<String, String> info = new HashMap<String, String>();
		info.put(HadoopConst.MASTER_HOST_TAG, hosts.get(0));
		info.put(Const.HADOOP_DST_TAG, hadoopPath);
		HadoopConfig coreConfig = null, mapredConfig = null, hdfsConfig = null;
		coreConfig = new HadoopConfig(folderName + "conf/" + HadoopConst.CORE_SITE_FILE, info);
		mapredConfig = new HadoopConfig(folderName + "conf/" + HadoopConst.MAPRED_SITE_FILE, info);
		hdfsConfig = new HadoopConfig(folderName + "conf/" + HadoopConst.HDFS_SITE_FILE, info);
		
		coreConfig.update();
		mapredConfig.update();
		hdfsConfig.update();
		
	}
	/**
	 * Used: store the jdk folder name to Const.JDK_CONF_FILE
	 * @param folderName
	 */
	/*
	private void storeFolderName(String folderName){
		try{
			boolean append = true;
			String newContent = Const.JDK_FOLDER_TAG + Const.LINE_SEPERATOR + folderName + "\n";
			BufferedReader reader = new BufferedReader(new FileReader(Const.JDK_CONF_FILE));
			String line = null;
			StringBuffer buffer = new StringBuffer();
			while((line = reader.readLine()) != null){
				String[] strs = line.split(Const.LINE_SEPERATOR);
				if(strs[0].equals(Const.JDK_FOLDER_TAG)){
					buffer.append(newContent);
					append = false;
				}
				else
					buffer.append(line + "\n");
			}
			if(append == true)
				buffer.append(newContent);
			reader.close();
			
			BufferedWriter writor = new BufferedWriter(new FileWriter(Const.JDK_CONF_FILE));
			writor.write(buffer.toString());
			writor.close();
			
		}catch(FileNotFoundException e){
			logHandler.error(this.getClass().getSimpleName() + " " + e.getMessage(), e);
		}catch(IOException e){
			logHandler.error(this.getClass().getSimpleName() + " " + e.getMessage(), e);
		}
	}
	*/
	/**
	 * Used: Getting .tar.gz de-compressed folder name
	 * @param file
	 * @return foldername
	 */
	private String getTarGzFolder(File file){
		String filename = null;
		TarInputStream tarIn = null;
		try{
			tarIn = new TarInputStream(new GZIPInputStream(new FileInputStream(file)));
				// I dont't know why i should use getNextEntry() twice to get the upper folder name!
				// My Answer: That .tar.gz has some problems...... 
//			if(file.isFile())
//				tarIn.getNextEntry();
			TarEntry entry = tarIn.getNextEntry();
			if(entry.isDirectory()){
				filename =  entry.getName();
			}else{
				logHandler.error(file + "is invalid!");
			}
			tarIn.close();
		}catch(IOException e){
			logHandler.error(this.getClass().getSimpleName() + " " + e.getMessage(), e);
		}
		return filename;
	}
	/**
	 * Used to untar src to Const.HADOOP_SRC_FOLDER
	 * @param src
	 */
	private void untar(String src, String dst){
		try{
			System.out.println("untar to " + dst);
			TarGzGenerator targz = new TarGzGenerator();
			targz.getUnTarGz(src, dst);
		}catch(RuntimeException e){
			logHandler.error("untar Error : " + this.getClass().getSimpleName() + " " + e.getMessage(), e);
		}
	}
	/**
	 * Used to tar src to Const.JDK_TAR_FILE
	 * @param src
	 */
	private void tar(String src, String dst){
		try{
			System.out.println("Run tar:");
			TarGzGenerator targz = new TarGzGenerator();
			targz.getTarGz(src, dst);
			
				// cannot get proper tar file! I don't know why...
//			Runtime.getRuntime().exec(new String[]{"/bin/sh", "-c", "tar -zcvf " + Const.JDK_TAR_FILE + " " + src}, null, null);
		
		}catch(RuntimeException e){
			logHandler.error("tar error : " + this.getClass().getSimpleName() + e.getMessage(), e);
		}
	}
	
//	private static int count = 0;
	public void Run(){
		init();
		readin();
		dealLocal();
		sftp();
		appendHostFile();
		addHadoopUser();
		sshLogin();
		changemod();
		dealjdk();
		dealhadoop();
		
	}
		//TODO: test the function of full configuration
		//		modify UI
	public static void main(String[] args) throws IOException{
//		Runtime.getRuntime().exec("tar -zxf /home/deke/yt/software/hadoop-0.20.2.tar.gz hadoop/");
		Entrance entrance = new Entrance();
//		entrance.untar("/home/deke/yt/software/hadoop-0.20.2.tar.gz", "hadoop/");
		entrance.Run();
	}
}
