package westh.ilib.service.surveyReport.reportGeneration;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;

import org.apache.lucene.search.Query;

import westh.ilib.service.surveyReport.documentSearch.docSearcher;
import westh.ilib.service.surveyReport.queryGeneration.Generator;

/**
 * Builds the training data for the sentence-selection classifier.
 * <p>
 * Step 1 ({@link #tagging}) retrieves documents for a fixed topic, shows every
 * sentence to a human operator on the console and records a 0/1 label per
 * sentence in a raw ARFF file.  Step 2 ({@link #arffGenerator}) converts that
 * file into a Weka training set of binary word-presence features.
 */
public class ClassifierTraining {

	// Shared Porter stemmer; used both when building the training set and
	// when extracting feature vectors at classification time.
	static PorterStemmer stemmer = new PorterStemmer();

	// Feature words, already reduced by the Porter stemmer so they compare
	// equal to stemmed sentence words.  Unstemmed originals were: we, paper,
	// work, show, present, describe, propose, introduce, design, implement,
	// adopt, use, demostrate [sic], algorithm, approach, finding, method,
	// technique, model, evaluate, estimate.
	static String[] attributes = { "we", "paper", "work", "show", "present",
			"describe", "propos", "introduc", "design", "implement", "adopt",
			"us", "demostr", "algorithm", "approach", "find", "method",
			"techniqu", "model", "evalu", "estim" };

	/**
	 * Interactive tagging session that produces the raw training data.
	 * Generates a query for the hard-coded topic "search engine", retrieves
	 * the top documents, prints every sentence to the console and asks the
	 * operator to type 'y' (select, class 1) or any other key (reject,
	 * class 0) followed by Enter.  The tagged sentences are written to
	 * {@code taggedPath} as an ARFF file with a string attribute and a
	 * {0,1} class.
	 *
	 * @param indexPath        path of the Lucene document index
	 * @param wordnetIndexPath path of the WordNet index used for query expansion
	 * @param taggedPath       output ARFF file receiving the tagged sentences
	 */
	private static void tagging(String indexPath, String wordnetIndexPath,
			String taggedPath) {
		// query generation
		Generator gen = new Generator("search engine");
		Query query = gen.generator(wordnetIndexPath);

		// document search: the searcher fills these parallel arrays with the
		// top n hits
		int n = 50;
		float[] scores = new float[n];
		String[] years = new String[n];
		String[] confs = new String[n];
		String[] titles = new String[n];
		String[] keywords = new String[n];
		String[] summarys = new String[n];
		int[] rscType = new int[n];
		docSearcher.search(indexPath, query, n, scores, years, confs, titles,
				keywords, summarys, rscType);

		// split the retrieved documents into sentences for human tagging
		SummGenerator summGen = new SummGenerator(n, scores, years, confs,
				titles, keywords, summarys, rscType);

		try {
			File tagged = new File(taggedPath);
			// BUGFIX: the old code called canWrite() before checking
			// exists(); File.canWrite() is false for a file that does not
			// exist yet, so tagging always aborted unless the output file
			// was already there.  Only an existing, non-writable file is an
			// error; an existing writable one is deleted and recreated.
			if (tagged.exists()) {
				if (!tagged.canWrite()) {
					System.out.println("Error: " + taggedPath + " can't write.");
					return;
				}
				tagged.delete();
			}
			// try-with-resources: the writer is closed (and flushed) even if
			// the session is aborted by an exception; the old code leaked it.
			try (BufferedWriter bw = new BufferedWriter(new FileWriter(tagged))) {
				// ARFF header: one string attribute plus the binary class
				bw.write("@relation sentence_selection");
				bw.newLine();
				bw.newLine();
				bw.write("@attribute text string");
				bw.newLine();
				bw.write("@attribute class {0,1}");
				bw.newLine();
				bw.newLine();
				bw.write("@data");
				bw.newLine();
				for (int i = 0; i < summGen.docs.size(); i++) {
					if (summGen.docs.get(i) == null) {
						continue;
					}
					// print out the document so the operator has context
					System.out.println("doc no." + i);
					System.out.println(summGen.docs.get(i).year + "\t"
							+ summGen.docs.get(i).conf + "\t"
							+ summGen.docs.get(i).score + "\t"
							+ summGen.docs.get(i).title);
					System.out.println("keyword: " + summGen.docs.get(i).keyword);
					System.out.println("sentence num: "
							+ summGen.docs.get(i).sents.size());
					for (int j = 0; j < summGen.docs.get(i).sents.size(); j++) {
						System.out.println(summGen.docs.get(i).sents.get(j).paraPos
								+ " " + summGen.docs.get(i).sents.get(j).sentPos
								+ " " + summGen.docs.get(i).sents.get(j).text);
					}
					System.out.println();

					// ask the operator about each sentence: 'y' selects it
					// (class 1), anything else rejects it (class 0)
					for (int j = 0; j < summGen.docs.get(i).sents.size(); j++) {
						System.out.println(summGen.docs.get(i).sents.get(j).paraPos
								+ " " + summGen.docs.get(i).sents.get(j).sentPos
								+ " " + summGen.docs.get(i).sents.get(j).text);

						int choose = System.in.read();
						bw.write("'" + summGen.docs.get(i).sents.get(j).text
								+ "'," + (choose == 'y' ? "1" : "0"));
						bw.newLine();
						System.out.println(choose == 'y');

						// consume the line-terminator bytes left in stdin by
						// the Enter key (CR+LF on Windows)
						System.in.read();
						System.in.read();
					}
					System.out.println();
				}
			}
		} catch (Exception e) {
			e.printStackTrace();
		}
	}

	/**
	 * Converts the interactively tagged sentence file into a Weka training
	 * set.  Each sentence becomes one data row of binary features — one per
	 * entry of {@link #attributes}, 1 when the stemmed attribute word occurs
	 * in the stemmed sentence — followed by the 0/1 class label copied from
	 * the tagged file.
	 *
	 * @param taggedPath      input ARFF produced by {@link #tagging}
	 * @param trainingSetPath output ARFF with binary attribute vectors
	 */
	private static void arffGenerator(String taggedPath, String trainingSetPath) {
		try {
			File infile = new File(taggedPath);

			// output .arff for weka
			File outfile = new File(trainingSetPath);
			if (outfile.exists()) {
				outfile.delete();
			}

			// try-with-resources closes both streams even on failure; the
			// old code leaked them whenever an exception was thrown mid-way
			try (BufferedReader br = new BufferedReader(new FileReader(infile));
					BufferedWriter bw = new BufferedWriter(new FileWriter(outfile))) {
				// write @relation
				bw.write("@relation sentence_selection");
				bw.newLine();
				bw.newLine();
				// write @attribute: one binary feature per attribute word
				for (int i = 0; i < attributes.length; i++) {
					bw.write("@attribute " + attributes[i] + " {0,1}");
					bw.newLine();
				}
				bw.write("@attribute class {0,1}");
				bw.newLine();
				bw.newLine();

				// write @data
				bw.write("@data");
				bw.newLine();

				// read in the whole tagged file.  BUGFIX: the old loop called
				// br.read(cbuf) repeatedly, each call overwriting the buffer
				// from index 0; this version accumulates at the right offset.
				char[] cbuf = new char[(int) infile.length()];
				int filled = 0;
				int got;
				while (filled < cbuf.length
						&& (got = br.read(cbuf, filled, cbuf.length - filled)) != -1) {
					filled += got;
				}
				String sbuf = new String(cbuf, 0, filled);

				// the data section starts at the first quoted sentence;
				// BUGFIX: guard against a file with no tagged sentences
				// (indexOf == -1 used to throw StringIndexOutOfBounds)
				int start = sbuf.indexOf('\'');
				if (start < 0) {
					return;
				}
				sbuf = sbuf.substring(start);

				// each tagged line looks like: 'sentence text',0  or  ...,1
				String[] taggedSent = sbuf.split("\n");
				for (int i = 0; i < taggedSent.length; i++) {
					String line = taggedSent[i];
					// splitting CRLF-terminated text on '\n' leaves a
					// trailing '\r'; strip it so the class label is clean.
					// BUGFIX: the old code wrote the '\r' into the label and
					// relied on fixed offsets that broke on LF-only files.
					if (line.endsWith("\r")) {
						line = line.substring(0, line.length() - 1);
					}
					if (line.length() < 4) {
						continue; // blank or malformed line
					}
					// strip the surrounding quotes and the ",<class>" suffix
					String sent = line.substring(1, line.length() - 3);

					// emit 0/1 per attribute, then copy the class label
					writeAttributeValues(bw, sent);
					bw.write(line.substring(line.length() - 1));
					bw.newLine();
					bw.newLine();
				}
			}
		} catch (Exception e) {
			e.printStackTrace();
		}
	}

	/**
	 * Writes one "1," or "0," token per attribute word, depending on whether
	 * the stemmed attribute occurs in the stemmed sentence.  Delegates to
	 * {@link #getAttributeVector} so training and classification use exactly
	 * the same feature extraction (the old code duplicated the logic).
	 *
	 * @throws IOException if the writer fails
	 */
	private static void writeAttributeValues(BufferedWriter bw, String sent)
			throws IOException {
		boolean[] vector = getAttributeVector(sent);
		for (int i = 0; i < vector.length; i++) {
			bw.write(vector[i] ? "1," : "0,");
		}
	}

	/**
	 * Maps a sentence to its binary feature vector: element i is true when
	 * the stemmed attribute word {@code attributes[i]} occurs in the stemmed
	 * sentence.
	 * <p>
	 * BUGFIX: matching is now done on whole stemmed words.  The old code used
	 * a plain substring search, so short attributes such as "us" or "we"
	 * matched inside unrelated words ("trust", "well", ...).
	 *
	 * @param sent a raw (unstemmed) sentence
	 * @return one boolean per entry of {@link #attributes}
	 */
	public static boolean[] getAttributeVector(String sent) {
		boolean[] vector = new boolean[attributes.length];
		String[] words = stemmer.stemWords(sent.split(" "));
		// join the stemmed words with single spaces and pad both ends so that
		// every word is delimited by a space on each side
		StringBuilder sb = new StringBuilder(" ");
		for (int i = 0; i < words.length; i++) {
			sb.append(words[i]).append(' ');
		}
		String stemmed = sb.toString();
		for (int i = 0; i < attributes.length; i++) {
			vector[i] = stemmed.contains(" " + attributes[i] + " ");
		}
		return vector;
	}

	/**
	 * Runs the two training-set build steps: interactive tagging followed by
	 * conversion to a binary-feature ARFF file.  All paths are hard-coded
	 * for the original author's machine.
	 */
	public static void main(String[] args) {
		final String taggedPath = "E:\\zhyx\\SurveyReport\\sentence-tagged.arff";
		final String trainingSetPath = "E:\\zhyx\\SurveyReport\\sentence-train.arff";
		final String indexPath = "E:\\zhyx\\SurveyReport\\index\\";
		final String wordnetIndexPath = "E:\\zhyx\\SurveyReport\\wordnetIndex";

		ClassifierTraining.tagging(indexPath, wordnetIndexPath, taggedPath);
		ClassifierTraining.arffGenerator(taggedPath, trainingSetPath);
	}
}
