package com.wsc.controller;


import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;

/**
 * Controls the pool of remote crawler nodes: loads node details from the
 * slave-nodes configuration file, pings each node, starts reachable nodes
 * over SSH, monitors them via heartbeat requests and shuts them down.
 *
 * <p>The four {@code crawler_*} lists are parallel: index {@code i} in each
 * list describes the same crawler node.
 *
 * <p>Not thread-safe: the lists are mutated without synchronization, so an
 * instance must be confined to a single thread.
 */
public class CrawlerControle {
	// Parallel per-node lists (index i describes crawler node i).
	ArrayList<String> crawler_name = new ArrayList<String>();
	ArrayList<String> crawler_ip = new ArrayList<String>();
	ArrayList<String> crawler_user = new ArrayList<String>();
	// "yes" when the node answered the last ping, "no" otherwise.
	ArrayList<String> crawler_pingstatus = new ArrayList<String>();

	/** Path of this controller's log file; set by {@link #logCreation()}. */
	String CrawlerControlelogfile;

	/** Port every crawler server listens on; set via {@link #setPort(int)}. */
	int port;

	/** Milliseconds to wait for a ping reply before declaring a node down. */
	private static final int PING_TIMEOUT_MS = 3000;

	/**
	 * Creates this controller's log file and records the creation.
	 *
	 * @throws IOException if the log file cannot be created or written
	 */
	public void logCreation() throws IOException{
		CrawlerControlelogfile = Logger.logfile("CrawlerControle");
		Logger.writeLog(CrawlerControlelogfile,"logfilecreation is done\n");
	}

	/**
	 * Sets the port used when contacting crawler servers.
	 *
	 * @param port the crawler servers' listening port
	 */
	public void setPort(int port){
		this.port = port;
	}

	/**
	 * Reads the crawler node details (name, ip, user) from the slave-nodes
	 * configuration file into the parallel lists.
	 *
	 * @return {@code true} when the file was parsed successfully,
	 *         {@code false} on any parse or I/O failure (details are logged)
	 * @throws IOException if writing to the log file fails
	 */
	public boolean getCrawlerIps() throws IOException{
		Logger.writeLog(CrawlerControlelogfile,"getting crawler ips\n");
		try {
			File fXmlFile = new File("C:\\test\\frontier\\dom\\slave-nodes.xml");
			DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
			// Harden against XXE: the config file never needs a DTD or
			// external entities, so refuse DOCTYPE declarations outright.
			try {
				dbFactory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
			} catch (Exception ignored) {
				// Parser implementation does not know this feature; proceed
				// with its defaults rather than failing startup.
			}
			DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
			Document doc = dBuilder.parse(fXmlFile);
			doc.getDocumentElement().normalize();

			NodeList nList = doc.getElementsByTagName("crawler");
			for (int i = 0; i < nList.getLength(); i++) {
				Node nNode = nList.item(i);
				if (nNode.getNodeType() == Node.ELEMENT_NODE) {
					Element eElement = (Element) nNode;
					crawler_name.add(eElement.getElementsByTagName("name").item(0).getTextContent());
					crawler_ip.add(eElement.getElementsByTagName("ip").item(0).getTextContent());
					crawler_user.add(eElement.getElementsByTagName("user").item(0).getTextContent());
				}
			}
			Logger.writeLog(CrawlerControlelogfile,"crawler ips are successfully read\n");
			return true;
		}
		catch (SAXParseException err) {
			Logger.writeLog(CrawlerControlelogfile,"following error occured while reading crawler_nodes.xml file\n");
			Logger.writeLog(CrawlerControlelogfile,"** Parsing error" + ", line "
					+ err.getLineNumber () + ", uri " + err.getSystemId ()+"\n");
			Logger.writeLog(CrawlerControlelogfile," " + err.getMessage ()+"\n");
			return false;
		}
		catch (SAXException e) {
			Logger.writeLog(CrawlerControlelogfile,"following error occured while reading crawler_nodes.xml file\n");
			// Log the embedded exception (if any) instead of printing a stack
			// trace to stderr, so all diagnostics land in one place.
			Exception x = e.getException();
			Logger.writeLog(CrawlerControlelogfile, "SAXException :"+((x == null) ? e : x)+"\n");
			return false;
		}
		catch (Throwable t) {
			// Last-resort guard so a single bad config read never kills the
			// controller; the caller sees a plain failure.
			Logger.writeLog(CrawlerControlelogfile,"following error occured while reading crawler_nodes.xml file\n");
			Logger.writeLog(CrawlerControlelogfile, "Throwable exception :"+t+"\n");
			return false;
		}
	}

	/**
	 * Pings every configured crawler node and records "yes"/"no" in
	 * {@code crawler_pingstatus} (one entry per node, in list order).
	 *
	 * @throws IOException if writing to the log file fails
	 */
	public void crawlerPinging() throws IOException{
		Logger.writeLog(CrawlerControlelogfile,"pinging of crawler nodes started\n");
		for(int i=0;i<crawler_ip.size();i++){
			String host = crawler_ip.get(i);
			Logger.writeLog(CrawlerControlelogfile,"pinging of "+host+" crawler node started\n");
			try{
				boolean status = InetAddress.getByName(host).isReachable(PING_TIMEOUT_MS);
				Logger.writeLog(CrawlerControlelogfile,"pinging of "+host+" crawler node is done\n");
				if(!status){
					Logger.writeLog(CrawlerControlelogfile,"following error occured during pinging of "+host+" crawler node\n");
					Logger.writeLog(CrawlerControlelogfile,host+" Crawler node Host can't be pinged (node is not started) \n");
					crawler_pingstatus.add("no");
				}
				else{
					crawler_pingstatus.add("yes");
				}
			}
			catch(UnknownHostException e){
				Logger.writeLog(CrawlerControlelogfile,"following error occured during pinging of "+host+" crawler node\n");
				Logger.writeLog(CrawlerControlelogfile,host+" crawler node Host does not exists \n");
				crawler_pingstatus.add("no");
			}
			catch(Exception e){
				Logger.writeLog(CrawlerControlelogfile,"following error occured during pinging of "+host+" crawler node\n");
				Logger.writeLog(CrawlerControlelogfile,host+" crawler node pinging exception :"+e+"\n");
				crawler_pingstatus.add("no");
			}
		}
	}

	/**
	 * Starts every crawler node whose recorded ping status is "yes".
	 *
	 * @throws IOException if writing to the log file fails
	 */
	public void startCrawlerNodes() throws IOException{
		Logger.writeLog(CrawlerControlelogfile,"Starting Crawler nodes\n");
		for(int i=0;i<crawler_pingstatus.size();i++){
			if("yes".equals(crawler_pingstatus.get(i))){
				startNode(i);//start the crawler node with specified index
			}
		}
	}

	/**
	 * Starts the crawler node at the given list index by running its start-up
	 * script over SSH, logging the command output.
	 *
	 * @param index position of the node in the parallel lists
	 * @throws IOException if writing to the log file fails
	 */
	public void startNode(int index) throws IOException{
		String ip = crawler_ip.get(index);
		String username = crawler_user.get(index);
		Logger.writeLog(CrawlerControlelogfile,"Starting of "+ip+" crawler node is started\n");
		// ProcessBuilder with an explicit argument list avoids the naive
		// whitespace re-tokenizing (and injection risk) of Runtime.exec(String).
		ProcessBuilder pb = new ProcessBuilder("ssh", username + "@" + ip, "~/wsc/bin/start_crawler.sh");
		pb.redirectErrorStream(true); // capture stderr alongside stdout
		try{
			Process p = pb.start();
			Logger.writeLog(CrawlerControlelogfile,"following is the out put during the start up of "+ip+" crawler node\n");
			// Drain the output BEFORE waitFor(): blocking on waitFor() first can
			// deadlock if the child fills its output pipe buffer. Reading to EOF
			// (rather than polling ready()) also guarantees no output is dropped.
			try (BufferedReader r = new BufferedReader(new InputStreamReader(p.getInputStream()))) {
				String line;
				while ((line = r.readLine()) != null) {
					Logger.writeLog(CrawlerControlelogfile, line + "\n");
				}
			}
			p.waitFor();
			Logger.writeLog(CrawlerControlelogfile,"Starting of "+ip+" crawler node is done\n");
		}
		catch(InterruptedException e){
			// Preserve the interrupt status for callers further up the stack.
			Thread.currentThread().interrupt();
			Logger.writeLog(CrawlerControlelogfile,"following exception occured during the starting of "+ip+" crawler node\n");
			Logger.writeLog(CrawlerControlelogfile,"throwable exception is :"+e+"\n");
		}
		catch(Exception e){
			Logger.writeLog(CrawlerControlelogfile,"following exception occured during the starting of "+ip+" crawler node\n");
			Logger.writeLog(CrawlerControlelogfile,"throwable exception is :"+e+"\n");
		}
	}

	/**
	 * Re-pings every node previously marked "no" and, when a node has come
	 * online, flips its status to "yes" and starts it.
	 *
	 * <p>Name kept (including the original spelling) for caller compatibility.
	 *
	 * @throws IOException if writing to the log file fails
	 */
	public void newCrawlerStrartup() throws IOException{
		for(int i=0;i<crawler_pingstatus.size();i++){
			if("no".equals(crawler_pingstatus.get(i))){
				if(pingNewCrawler(i)){
					crawler_pingstatus.set(i, "yes");
					startNode(i);
				}
			}
		}
	}

	/**
	 * Pings the crawler node at the given list index.
	 *
	 * @param index position of the node in the parallel lists
	 * @return {@code true} if the host answered within the ping timeout
	 * @throws IOException never in practice (failures are mapped to false);
	 *         declared for signature compatibility
	 */
	public boolean pingNewCrawler(int index) throws IOException{
		String host = crawler_ip.get(index);
		try{
			return InetAddress.getByName(host).isReachable(PING_TIMEOUT_MS);
		}
		catch(Exception e){
			// Unknown host, I/O failure, etc. all mean "not reachable".
			return false;
		}
	}

	/**
	 * Sends a heartbeat to every node marked "yes"; a node that does not answer
	 * with status 200 is re-pinged and restarted when still reachable.
	 *
	 * @param ControllerMaintenancelogfile log file for maintenance messages
	 * @throws IOException if writing to the log file fails
	 * @throws Exception propagated from the heartbeat request
	 */
	public void checkCrawlers(String ControllerMaintenancelogfile) throws IOException, Exception{
		for(int i=0;i<crawler_pingstatus.size();i++){
			if("yes".equals(crawler_pingstatus.get(i))){
				String ip = crawler_ip.get(i);
				ControllerClient cclient = new ControllerClient();
				cclient.sendRequest(ip,port,"heartbeat");
				int statuscode = cclient.statuscode;
				if(statuscode != 200){
					Logger.writeLog(ControllerMaintenancelogfile,"crawler node "+ip+" is not responding.....it may be down\n");
					Logger.writeLog(ControllerMaintenancelogfile,"trying to start up crawler node "+ip+" again...");
					if(pingNewCrawler(i)){
						startNode(i);
						Logger.writeLog(ControllerMaintenancelogfile,"crawler node "+ip+" is up again\n");
					}
					else{
						Logger.writeLog(ControllerMaintenancelogfile,"crawler node "+ip+" is unable to ping.....it is down\n");
					}
				}
				else{
					Logger.writeLog(ControllerMaintenancelogfile,"crawler node "+ip+" is working fine\n");
				}
			}
		}
	}

	/**
	 * Sends a shutdown request to every configured crawler node.
	 *
	 * @throws Exception propagated from the shutdown request
	 */
	public void shutdownCrawlers() throws Exception{
		for(int i=0;i<crawler_ip.size();i++){
			String ip = crawler_ip.get(i);
			ControllerClient cclient = new ControllerClient();
			cclient.sendRequest(ip, port, "shutdown");
			// TODO confirm that every crawler actually shut down before returning.
		}
	}
}
