import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Properties;
import org.htmlparser.util.ParserException;
import org.jdom.JDOMException;
import websphinx.Link;

/**
 * @author ofer rotberg
 *
 */
public class AppTester {

	/**
	 * Reads one URL per line from the given file and wraps each line in an
	 * ExtendedLink. Lines were written by CreateUrlList with embedded
	 * newlines encoded as the literal token "newline"; they are decoded
	 * back to real newlines here.
	 *
	 * @param dirName  directory containing the URL file
	 * @param fileName name of the URL file inside dirName
	 * @return list of links, one per line read (possibly empty)
	 * @throws MalformedURLException if a line is not a valid URL
	 * @throws IOException           on any read failure
	 */
	private static ArrayList<ExtendedLink> getWebLinksFromFile(String dirName, String fileName)
	throws MalformedURLException, IOException{
		ArrayList<ExtendedLink> webLinks = new ArrayList<ExtendedLink>();
		// try-with-resources: the original leaked the reader on every call
		try (BufferedReader br = new BufferedReader(new FileReader(new File(dirName, fileName)))) {
			String currentUrl = br.readLine();
			while (currentUrl != null) {
				// Undo the encoding done in CreateUrlList ("\n" -> "newline").
				// The original used replaceAll("newline", "\\n"): in a Matcher
				// replacement string, backslash escapes the next character, so
				// "\\n" inserts a literal 'n' rather than a newline. Use plain
				// String.replace with a real newline to restore the URL.
				currentUrl = currentUrl.replace("newline", "\n");
				webLinks.add(new ExtendedLink(new Link(currentUrl)));
				currentUrl = br.readLine();
			}
		}
		return webLinks;
	}

	/**
	 * Fuzzes every query read from {@code filename} and writes the generated
	 * URLs, one per line, to "urls_" + filename. Embedded newlines in a
	 * generated URL are encoded as the literal token "newline" so the output
	 * stays one-URL-per-line (decoded again by getWebLinksFromFile).
	 *
	 * @param filename input file with one query string per line; a name
	 *                 containing "attack" switches the fuzzer to attack mode
	 * @param count    number of URLs the fuzzer should build per query
	 * @return total number of URLs written
	 */
	public static int CreateUrlList(String filename, int count) throws IOException, DetectorException, JDOMException{
		// attack files are fuzzed in "atk" mode, everything else in "reg" mode
		String mode = filename.indexOf("attack") > -1 ? "atk" : "reg";
		int counter = 0;

		// try-with-resources: the original leaked the reader always, and
		// leaked the writer whenever fuzzing threw before bw.close()
		try (BufferedReader br = new BufferedReader(new FileReader(filename));
		     BufferedWriter bw = new BufferedWriter(new FileWriter("urls_" + filename))) {
			String curr = br.readLine();
			while (curr != null) {
				Fuzzer fuzzer = new Fuzzer(new Query(curr));
				fuzzer.buildUrlsList(count, mode);
				for (String u : fuzzer.getUrls()) {
					// keep everything up to and including '?', append the fuzzed query
					String fullUrl = curr.substring(0, curr.indexOf('?') + 1) + u;
					// encode real newlines so the file stays one URL per line
					fullUrl = fullUrl.replaceAll("\\n", "newline");
					bw.write(fullUrl + "\r\n");
					counter += 1;
				}
				curr = br.readLine();
			}
		}
		return counter;
	}

	/**
	 * Prints the given prompt to stdout, then reads one line from stdin and
	 * parses it as a decimal integer. The reader is intentionally not closed:
	 * closing it would close System.in for the rest of the program.
	 *
	 * @param question prompt shown to the user
	 * @return the integer the user typed
	 * @throws NumberFormatException if the input line is not a valid int
	 * @throws IOException           if reading from stdin fails
	 */
	public static int GetUserInput(String question) throws NumberFormatException, IOException{
		BufferedReader stdin = new BufferedReader(new InputStreamReader(System.in));
		System.out.println(question);
		String line = stdin.readLine();
		return Integer.parseInt(line);
	}

	/**
	 * Learning phase for one application: reads the application's URL file,
	 * learns a fraction p of it into the scripts DB, serializes the DB and
	 * the learned-URL map, writes a JS-convergence stats file, plots it, and
	 * finally moves all produced files into the application's directory.
	 *
	 * @param app name of the application (as listed in sites.txt)
	 * @param p   fraction of URLs to learn, 0 &lt; p &lt; 1
	 * @throws DetectorException if a directory/file is missing or a result
	 *                           file cannot be moved
	 */
	public static void LearnApp(String app, double p) throws IOException, ParserException, DetectorException{

		//set working directory
		// NOTE(review): "\\" hard-codes the Windows separator — the whole app
		// assumes Windows paths; consider File.separator if ever ported.
		String currDir = System.getProperty("user.dir");
		File baseDir = new File(currDir + "\\fpResults");

		//open the file that holds the urls in this application
		String dirName = new DataSetBuilder().BuildDirectoryName(app);
		File dir = new File(baseDir, dirName);
		String fileName = new DataSetBuilder().BuildFileName(app);
		File f = new File(fileName);

		// create the per-application results directory if it is missing
		if (!(dir.exists() && dir.isDirectory())) {
			if (!dir.mkdir()) {
				throw new DetectorException("Failed to create directory for learn");
			}
		}

		//open detector log file
		Log learnLog = new Log("learnLog.txt");

		//initialize scripts DB class
		ScriptsDB2 sd = new ScriptsDB2("LEARN", learnLog);

		//build links table from file
		// NOTE(review): existence is checked on f relative to the CWD, but the
		// URLs are read from inside dir — confirm both locations are intended.
		ArrayList<ExtendedLink> webLinks = null;
		if (f.exists() && f.isFile())
			webLinks = getWebLinksFromFile(dir.toString(), f.getName());
		else
			throw new DetectorException(fileName + " doesn't exist");

		//learn P percent of random url's from regular links table (0<P<1)
		DetectorResults dr = sd.learn(webLinks, p);

		//save ScriptsDB object to file for further use (detection)
		// try-with-resources: the original leaked the stream on write failure
		ArrayList<ScriptNode> scriptsArray = sd.getAppScripts();
		try (ObjectOutputStream out = new ObjectOutputStream(new FileOutputStream("scriptsDB.txt"))) {
			out.writeObject(scriptsArray);
		}

		//save learnedUrls object to file for future use (detection)
		HashMap<Integer, String> learnedUrls = sd.getLearnedURLs();
		try (ObjectOutputStream out = new ObjectOutputStream(new FileOutputStream("learnedUrls.txt"))) {
			out.writeObject(learnedUrls);
		}

		//close log files
		learnLog.closeLog();

		// write the JS-convergence stats file consumed by Plotter below;
		// try-with-resources guarantees the writer is closed on any failure
		try (BufferedWriter bw = new BufferedWriter(new FileWriter(new File("stats.txt")))) {
			bw.write("JavaScript convergence for " + app + "\r\n");
			bw.write("Number of pages\r\n");
			bw.write("#\r\n");
			bw.write("Incremental number of new JS\r\n");
			bw.write("#\r\n");
			bw.write("3\r\n");
			bw.write(dr.getNewScripts().size() + "\r\n");
			bw.write(dr.getNewScripts().size() + "\r\n");
			int cnt = 1;
			for (int n : dr.getNewScriptsIncremental()) {
				bw.write(cnt + " " + n + "\r\n");
				cnt++;
			}
			cnt = 1;
			bw.write(dr.getNewScriptIncrementalSorted().size() + "\r\n");
			for (int n : dr.getNewScriptIncrementalSorted()) {
				bw.write(cnt + " " + n + "\r\n");
				cnt++;
			}
			cnt = 1;
			bw.write(dr.getNewScripts().size() + "\r\n");
			for (int n : dr.getNewScripts()) {
				bw.write(cnt + " " + n + "\r\n");
				cnt++;
			}
		}

		Plotter plotter = new Plotter("stats.txt");
		plotter.Plot();

		XYChart xyc = new XYChart();
		File chart = xyc.Plot(dr, app);

		//move learning files to app directory
		File db1 = new File("scriptsDB.txt");
		File log = new File("learnLog.txt");
		File urls = new File("learnedUrls.txt");
		File db2 = new File("LrnDB.txt");
		File stat = new File("stats.txt");

		// non-short-circuit & so every rename is attempted even after a failure
		boolean moved = db1.renameTo(new File(dir, db1.getName()))
				& log.renameTo(new File(dir, log.getName()))
				& urls.renameTo(new File(dir, urls.getName()))
				& db2.renameTo(new File(dir, db2.getName()))
				& stat.renameTo(new File(dir, stat.getName()));
		if (chart != null)
			moved &= chart.renameTo(new File(dir, chart.getName()));

		if (!moved) {
			throw new DetectorException("File was not successfully moved");
		}
	}

	/**
	 * TODO: unimplemented stub — apparently intended to add extra
	 * (benign/attack) URLs derived from query q and parameter par for
	 * false-negative detection; a commented-out call to it exists in
	 * DetectApp's mode==2 branch. Currently a no-op.
	 */
	public static void AddUrls(Query q, String par){

	}

	/**
	 * Detection phase for one application: loads the serialized scripts DB
	 * and learned-URL map produced by LearnApp, runs the detector over the
	 * application's URL list, logs the false-positive statistics, and moves
	 * the result files into the application's directory.
	 *
	 * @param app  application name (as listed in sites.txt)
	 * @param p    fraction of URLs to run detection on
	 * @param mode 2 = false-negative mode (prompts for extra URLs; the
	 *             AddUrls feature is currently a stub), anything else =
	 *             plain false-positive detection
	 */
	public static void DetectApp(String app, double p, int mode) throws IOException, ClassNotFoundException, DetectorException, ParserException{
		//set working directory (Windows-style separators, as elsewhere in this class)
		String currDir = System.getProperty("user.dir");
		File baseDir = new File(currDir + "\\fpResults");

		//open the directory that holds the learning results for this application
		String dirName = new DataSetBuilder().BuildDirectoryName(app);
		File dir = new File(baseDir, dirName);

		//open the file that holds the urls in this application
		String fileName = new DataSetBuilder().BuildFileName(app);
		File f = new File(dir, fileName);

		//build links table from file
		ArrayList<ExtendedLink> webLinks = null;
		if (f.exists() && f.isFile())
			webLinks = getWebLinksFromFile(dir.toString(), f.getName());
		else
			throw new DetectorException(fileName + " doesn't exist");

		//if detection in FN mode, need to add more benign and attack URL's
		if (mode == 2) {
			BufferedReader b = new BufferedReader(new InputStreamReader(System.in));
			System.out.println("Enter Query: ");
			String url = b.readLine();
			Query q = new Query(url);
			HashMap<String, String> params = q.getParams();
			String par = null;
			// NOTE(review): containsKey(null) is false, so this loop never runs
			// and no parameter is ever read — the condition was probably meant
			// to be !params.containsKey(par). Left as-is because the AddUrls
			// feature below is still a stub.
			while (params.containsKey(par)) {
				b = new BufferedReader(new InputStreamReader(System.in));
				System.out.println("Enter parameter: ");
				par = b.readLine();
			}
			//webLinks = AddUrls(q,p);
		}
		//sets log file
		Log log = new Log("detectLog.txt");

		//load the scripts DB (written by LearnApp); try-with-resources so the
		//stream is closed even if deserialization fails
		ArrayList<ScriptNode> scriptsArray;
		try (ObjectInputStream in = new ObjectInputStream(
				new FileInputStream(baseDir + "\\" + dirName + "\\scriptsDB.txt"))) {
			@SuppressWarnings("unchecked")
			ArrayList<ScriptNode> read = (ArrayList<ScriptNode>) in.readObject();
			scriptsArray = read;
		}
		log.WriteToLog("There are " + scriptsArray.size() + " scripts in DB", false);

		//load the learned URLs object
		HashMap<Integer, String> learnedUrls;
		try (ObjectInputStream in = new ObjectInputStream(
				new FileInputStream(baseDir + "\\" + dirName + "\\learnedUrls.txt"))) {
			@SuppressWarnings("unchecked")
			HashMap<Integer, String> read = (HashMap<Integer, String>) in.readObject();
			learnedUrls = read;
		}
		log.WriteToLog(learnedUrls.size() + " major URL's were learned by the detector", false);

		ScriptsDB2 sd = new ScriptsDB2("DETECT", log);

		DetectorResults dr = sd.detect(scriptsArray, webLinks, learnedUrls, p, "fp");

		//write results to log
		log.WriteToLog("Finished detection of " + app, false);
		log.WriteToLog("Found " + (dr.getNumUrlWithRealAttack() + dr.getNumUnLearnedUrlWithAttack())
				+ " FP cases", false);
		log.WriteToLog("Unlearned FP URLs: " + dr.getNumUnLearnedUrlWithAttack(), false);
		log.WriteToLog("FP JS: " + dr.getNumScriptsWithRealAttack(), false);
		log.WriteToLog("Found " + dr.getNumUrlWithFalseAttack() + " Url's with normalized JS", false);
		log.WriteToLog("Found " + dr.getNumScriptsWithFalseAttack() + " normalized scripts", false);
		// cast the numerator BEFORE dividing: the original cast the quotient,
		// which truncates the fraction to 0 whenever the operands are integral
		log.WriteToLog("The overlapping factor is "
				+ ((double) dr.getOverlappingFactor() / webLinks.size()), false);

		//move detection files to app directory
		log.closeLog();
		File db1 = new File("detectLog.txt");
		File db2 = new File("DetDB.txt");

		// non-short-circuit & so both renames are always attempted
		boolean moved = db1.renameTo(new File(dir, db1.getName()))
				& db2.renameTo(new File(dir, db2.getName()));
		if (!moved) {
			throw new DetectorException("File was not successfully moved");
		}
	}

	/**
	 * Entry point. Configures the lab HTTP proxy, optionally crawls the
	 * sites listed in sites.txt, then runs either the learning phase
	 * (LearnApp) or the detection phase (DetectApp) over every application
	 * in sites.txt, according to interactive user input.
	 */
	public static void main(String[] args) {
		//Configure web-proxy settings in IDC CS Lab
		Properties props = System.getProperties();
		props.setProperty("http.proxyHost", "isa.idc.ac.il");
		// setProperty for consistency (original mixed setProperty and raw put)
		props.setProperty("http.proxyPort", "8080");

		// NOTE(review): large blocks of dead commented-out fuzzing/detection
		// code were removed here; see VCS history if they need to be revived.
		try {
			//check if crawling is needed
			int crawlMode = GetUserInput("(1) crawl \r\n(2) don't crawl");
			if (crawlMode == 1) {
				// constructing DataSetBuilder with a sites file performs the crawl
				new DataSetBuilder("sites.txt");
				System.out.println("\r\nCrawling Phase Finished");
			}

			//ask user for mode (learning/detection)
			int appMode = GetUserInput("(1) learn\r\n(2) detect");

			if (appMode == 1) { //Learning phase

				//ask the user what fraction of total urls he wants to learn from each App
				// divide by 100d (not 100f): the float intermediate the original
				// used loses precision for no benefit
				double fraction = GetUserInput("Please enter P, the part (%)  you want to learn (0<P<100)") / 100d;

				// try-with-resources: the original never closed this reader
				try (BufferedReader br = new BufferedReader(new FileReader(new File("sites.txt")))) {
					String currApp = br.readLine();
					while (currApp != null) {
						System.out.println("\r\nLearning application: " + currApp);
						LearnApp(currApp, fraction);
						currApp = br.readLine();
					}
				}
				System.out.println("\r\nFinished learning all applications");
			}

			if (appMode == 2) { //detection mode

				int detectMode = GetUserInput("Press (1) to false positive detection or (2) to false negative detection: ");
				// NOTE(review): the prompt asks for 0<P<1 but GetUserInput
				// parses an int, so only 0 or 1 can actually be entered —
				// confirm whether a fractional read was intended.
				double p = GetUserInput("Please enter P, the part you want to detect (0<P<1)");

				try (BufferedReader br = new BufferedReader(new FileReader(new File("sites.txt")))) {
					String currApp = br.readLine();
					while (currApp != null) {
						System.out.println("Detecting " + currApp);
						DetectApp(currApp, p, detectMode);
						currApp = br.readLine();
					}
				}
				System.out.println("\r\nFinished detection of all applications");
			}

			System.exit(0);
		} catch (NumberFormatException | IOException | DetectorException
				| ParserException | ClassNotFoundException e) {
			// boundary of the program: report and fall through to a non-crash exit
			e.printStackTrace();
		}
	}
}
