package asa;

import forum.ForumFactory;
import forum.ForumPost;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import ml.ArffFileFormat;
import ml.BagDataModel;
import ml.DataModel;
import ml.HCRFFileFormat;
import ml.LibSVMFileFormat;
import ml.MLFileFormat;
import ml.MLGlue;
import ml.MajorityVoting;
import ml.NullModel;
import ml.SequenceDataModel;
import ml.VoteFlip;
import ml.WekaGlue;
import ml.features.FeatureExtractor;
import ml.features.LemmaExtractor;
import ml.features.POSExtractor;
import ml.features.PolarityReversalExtractor;
import ml.features.SRLExtractor;
import ml.features.SentLengthExtractor;
import ml.features.WordPolarityExtractor;
import ner.NER;
import ner.NullNER;
import ner.StanfordNER;
import output.Output;
import output.PolarityEvaluator;
import output.QVXOutput;
import output.RelationDatabase;
import output.SpreadSheetOutput;
import output.StdoutOutput;
import parser.LTHSRLPipeline;
import parser.NullParser;
import parser.OpenNLPPipeline;
import parser.ParserPipeline;
import parser.StanfordPipeline;
import translate.RemoveTranslate;
import translate.NullTranslate;
import translate.Translate;

import corpus.ParseException;
import corpus.WordPolarityCorpus;
import corpus.SentPolarityCorpus;
import crawler.BugCrawler;
import crawler.CorpusCrawler;
import crawler.Crawler;
import crawler.MockUpCrawler;
import crawler.QlikViewCrawler;
import crawler.StdinCrawler;
import crawler.TwitterCrawler;

/**
 * Entry point and orchestrator for the ASA sentiment-analysis system.
 *
 * Depending on the {@code run} option this either:
 * <ul>
 *   <li>{@code makeconf} — writes a default configuration file and exits;</li>
 *   <li>{@code classify} — wires up crawlers, optional translators, parser
 *       pipelines and an output writer, then streams forum posts through
 *       the classification pipeline;</li>
 *   <li>{@code train} — loads a sentence-polarity corpus, extracts features
 *       and writes them out in the chosen ML file format.</li>
 * </ul>
 *
 * Configuration priority is: 1. command line, 2. conf file, 3. defaults
 * (see {@link #main(String[])}).
 */
public class ASA {
	/** Sentiment class labels used by all data models and classifiers. */
	private static final String[] emoLabels = new String[] { "neu", "pos",
			"neg" };

	/** NLP parser pipeline shared by crawlers, data model and features. */
	private ParserPipeline parser;
	/** Sentiment data model (bag-of-features or sequence, depending on ml). */
	private DataModel sdm;
	/** One crawler per comma-separated entry in the "input" option. */
	private List<Crawler> crawlers;
	/** Destination for classified posts (stdout, db, spreadsheet, ...). */
	private Output output;
	/** Named-entity recognizer used in classification mode. */
	private NER ner;
	/** Factory/registry for forum posts; optionally restored from disk. */
	private ForumFactory fac;
	/** Training corpus (only populated in "train" mode). */
	private SentPolarityCorpus spc;
	/** True when the ml option contains "hcrf" (sequence model). */
	private boolean sequence;
	/** Handshake object letting the shutdown hook wait for the main loop. */
	private MainThreadPass pass;

	private String ml;
	private String features;
	private String models;
	private String inpath;

	private String inputdbname;
	private String inputdbuser;
	private String inputdbpassword;
	private String datestart;
	private String dateend;

	private String outputdbname;
	private String outputdbpassword;
	private String outputdbuser;

	// Scratch timestamps used by printTime() for coarse progress logging.
	private long timeStart;
	private long timeEnd;

	private Pipeline[] pipelines;
	private Translate[] translators;

	private String translator;
	private int nbrPipelines;

	private int nbrTranslators;

	/**
	 * Parses options (command line overrides conf file overrides defaults)
	 * and starts an {@link ASA} run. Exits with status 1 on any option or
	 * configuration error.
	 *
	 * @param args command-line arguments understood by {@code Options}
	 */
	public static void main(String[] args) {
		// Priority order is, 1. command-line, 2. conf file, 3. default
		Options o = new Options();
		try {
			if (args.length != 0)
				o.parse(args); // this is just to get conf file location
			File conf = new File(o.getProperty("conf"));
			if (conf.exists())
				o.readFile(conf);
			// Re-parse so command-line values override the conf file.
			if (args.length != 0)
				o.parse(args);
		} catch (ParseException e) {
			System.err.println(e.getMessage());
			o.printOptions();
			System.exit(1);
		} catch (IOException ie) {
			System.err.println("Couldn't load options " + ie.getMessage());
			o.printOptions();
			System.exit(1);
		} catch (java.text.ParseException pe) {
			System.err.println(pe.getMessage());
			o.printOptions();
			System.exit(1);
		}
		try {
			new ASA(o);
		} catch (Exception e) {
			e.printStackTrace();
		}
	}

	/**
	 * Builds and starts the run selected by the {@code run} option.
	 *
	 * @param o fully resolved options
	 * @throws Exception if any component fails to initialize
	 */
	public ASA(Options o) throws Exception {
		String run = o.getProperty("run");

		if (run.equals("makeconf")) {
			System.out.println("Generating default config file");
			FileWriter fw = new FileWriter(new File(o.getProperty("conf")));
			try {
				Options newo = new Options();
				newo.store(fw, "Default conf file for asa\n" + newo.optionString());
			} finally {
				// Close the writer so buffered output is flushed to disk
				// (previously leaked).
				fw.close();
			}
			return;
		}

		features = o.getProperty("features");
		inpath = o.getProperty("input.path");
		models = o.getProperty("models");
		ml = o.getProperty("ml");
		sequence = ml.contains("hcrf");
		inputdbname = o.getProperty("input.dbname");
		inputdbuser = o.getProperty("input.dbuser");
		inputdbpassword = o.getProperty("input.dbpass");
		datestart = o.getProperty("input.datestart");
		dateend = o.getProperty("input.dateend");
		outputdbname = o.getProperty("output.dbname");
		outputdbpassword = o.getProperty("output.dbpass");
		outputdbuser = o.getProperty("output.dbuser");
		nbrPipelines = Integer.parseInt(o.getProperty("pipeline.threads"));
		nbrTranslators = Integer.parseInt(o.getProperty("translation.threads"));
		translator = o.getProperty("translation");

		crawlers = new ArrayList<Crawler>();

		int nbrCrawler = o.getProperty("input").split(",\\s*").length;

		// Buckets are bounded producer/consumer queues; the second argument
		// is the number of producers feeding each bucket.
		PostBucket inBucket = new PostBucket(1000, nbrCrawler);
		PostBucket translateBucket = new PostBucket(1000, nbrTranslators);
		PostBucket outBucket = new PostBucket(200, nbrPipelines);

		parser = setupParserPipeline(o.getProperty("parser"),
				o.getProperty("ner.regexp"), o.getProperty("ner"),
				run.equals("classify"));

		if (run.equals("classify")) {

			setupClassifier();

			if (o.getProperty("loadold").contains("true"))
				fac = ForumFactory.load();
			else
				fac = new ForumFactory();
			ner = setupNER(o.getProperty("ner"), o.getProperty("ner.regexp"));
			String[] inputs = o.getProperty("input").split(",\\s*");
			for (String input : inputs) {
				crawlers.add(setupCrawler(input, inpath, inBucket));
			}
			String dir = o.getProperty("output.dir");
			String file = o.getProperty("output.file");
			// NOTE(review): plain string concatenation — assumes output.dir
			// already ends with a separator; confirm against conf defaults.
			String fileLocation = (file.startsWith(dir)) ? file : dir + file;

			output = setupOutput(o.getProperty("output"), fileLocation,
					o.getProperty("output.dir"), o.getProperty("output.delim"));
			if (doTranslation())
				translators = new Translate[nbrTranslators];
			pipelines = new Pipeline[nbrPipelines];

			// With translation enabled the flow is in -> translate -> out;
			// otherwise pipelines read straight from the input bucket.
			if (doTranslation()) {
				setupTranslation(inBucket, translateBucket);
				setupPipelines(translateBucket, outBucket);
			} else {
				setupPipelines(inBucket, outBucket);
			}

			startCrawling();
			if (doTranslation())
				startTranslation();
			startPipelines(outBucket);
		} else if (run.equals("train")) {
			System.err.print("Loading training data ... ");
			timeStart = System.currentTimeMillis();
			spc = new SentPolarityCorpus(inpath);
			setupDataModel();
			if (o.getProperty("features.generate").equals("true"))
				setupModelTrain();
			else
				setupModelCached();
			timeEnd = System.currentTimeMillis();
			printTime();
			startTrain(ml);
		} else
			throw new Error("Must specify something to run");
	}

	/**
	 * Instantiates the sentiment model for classification mode. Simple
	 * voting/flip/null models need no trained ML back end; anything else
	 * loads cached features plus a trained model via an {@link MLGlue}.
	 *
	 * @throws Exception if the model or its resources cannot be loaded
	 */
	private void setupClassifier() throws IOException, Exception {
		if (ml.equals("vote")) {
			sdm = new MajorityVoting(
					"sentiment",
					parser,
					emoLabels,
					new WordPolarityCorpus(new File("corpus/polwords/mpqa.txt")));
		} else if (ml.equals("flip")) {
			sdm = new VoteFlip(
					"sentiment",
					parser,
					emoLabels,
					new WordPolarityCorpus(new File("corpus/polwords/mpqa.txt")));
		} else if (ml.equals("null")) {
			sdm = new NullModel("sentiment", parser, emoLabels);
		} else {
			System.err.print("Loading cached data ... ");
			timeStart = System.currentTimeMillis();
			setupDataModel();
			timeEnd = System.currentTimeMillis();
			printTime();
			MLGlue glue = setupMLGlue(ml, "models/sentiment.model",
					"models/sentiment");
			sdm.setMLGlue(glue);
			setupModelCached();
		}
	}

	/** Chooses sequence vs. bag-of-features data model based on {@code ml}. */
	private void setupDataModel() {
		if (ml.equals("hcrf"))
			sdm = new SequenceDataModel("sentiment", parser, emoLabels);
		else
			sdm = new BagDataModel("sentiment", parser, emoLabels);
	}

	/**
	 * Loads a trained ML model wrapper for the given format.
	 *
	 * @param ml              ML format name (currently only "arff"/Weka)
	 * @param modelFile       path to the serialized model
	 * @param descriptionFile path prefix of the feature description file
	 * @return glue object wrapping the loaded model
	 * @throws Exception if loading fails; {@link Error} for unknown formats
	 */
	private MLGlue setupMLGlue(String ml, String modelFile,
			String descriptionFile) throws Exception {
		System.err.print("Loading sentiment model ... ");
		timeStart = System.currentTimeMillis();
		MLGlue glue;
		if (ml.equals("arff")) {
			glue = new WekaGlue(modelFile, descriptionFile + ".arff");
		} else
			throw new Error("Unimplemented format " + ml);
		timeEnd = System.currentTimeMillis();
		printTime();
		return glue;
	}

	/**
	 * Creates the worker pipelines and registers a JVM shutdown hook that
	 * knows about every running component so it can stop them cleanly.
	 *
	 * @param inBucket  bucket the pipelines consume from
	 * @param outBucket bucket the pipelines produce into
	 */
	private void setupPipelines(PostBucket inBucket, PostBucket outBucket)
			throws Exception {
		for (int i = 0; i < nbrPipelines; i++)
			pipelines[i] = new Pipeline(sdm, ner, fac, inBucket, outBucket);
		pass = new MainThreadPass();
		Shutdown shutdown = new Shutdown(pipelines, crawlers, translators,
				inBucket, outBucket, output, fac, pass);
		try {
			Runtime.getRuntime().addShutdownHook(shutdown);
			System.err.println("Shutdown hook added");
		} catch (Throwable t) {
			// addShutdownHook can throw (e.g. SecurityException); a missing
			// hook only degrades cleanup, so we log and continue.
			System.err.println("Could not add Shutdown hook");
		}
	}

	/**
	 * Instantiates {@code nbrTranslators} translator workers of the
	 * configured kind, each reading from {@code inBucket} and writing to
	 * {@code outBucket}.
	 */
	private void setupTranslation(PostBucket inBucket, PostBucket outBucket) {
		for (int i = 0; i < nbrTranslators; i++) {
			if (translator.equals("remove"))
				translators[i] = new RemoveTranslate(inBucket, outBucket);
			else if (translator.equals("null"))
				translators[i] = new NullTranslate(inBucket, outBucket);
			else
				throw new Error("Unimplemented translator: " + translator);
		}
		System.err.println("Translation set up");
	}

	/**
	 * Builds the output writer selected by the {@code output} option.
	 *
	 * @param output  writer kind: stdout, evaluate, database, qvx, spreadsheet
	 * @param outfile resolved output file path (spreadsheet)
	 * @param outdir  output directory (qvx)
	 * @param delim   field delimiter (spreadsheet)
	 * @return the configured {@link Output}
	 */
	private Output setupOutput(String output, String outfile, String outdir,
			String delim) throws Exception {
		System.err.println("Loading output writer");
		if (output.equals("stdout"))
			return new StdoutOutput();
		else if (output.equals("evaluate"))
			return new PolarityEvaluator(new SentPolarityCorpus(inpath));
		else if (output.equals("database"))
			return new RelationDatabase(outputdbname, outputdbuser,
					outputdbpassword);
		else if (output.equals("qvx"))
			return new QVXOutput(outdir);
		else if (output.equals("spreadsheet"))
			return new SpreadSheetOutput(new File(outfile), delim);
		else
			throw new Error("Unimplemented output " + output);
	}

	/**
	 * Builds the named-entity recognizer.
	 *
	 * @param ner     NER kind: null, stanford or regexp
	 * @param nerfile regexp/NER resource file; currently unused here — the
	 *                parser pipeline receives it instead (see
	 *                {@link #setupParserPipeline})
	 */
	private NER setupNER(String ner, String nerfile) throws ParseException {
		System.err.println("Loading NER");
		if (ner.equals("null"))
			return new NullNER(fac);
		else if (ner.equals("stanford") || ner.equals("regexp"))
			return new StanfordNER(fac);
		else
			throw new Error("Unimplemented ner " + ner);
	}

	/**
	 * Builds one crawler of the requested kind, feeding {@code outBucket}.
	 *
	 * @param crawlerstr crawler kind: stdin, corpus, qlikview, mockup, bug,
	 *                   twitter
	 * @param infile     input path for file-based crawlers
	 * @param outBucket  bucket the crawler pushes posts into
	 */
	private Crawler setupCrawler(String crawlerstr, String infile,
			PostBucket outBucket) throws Exception {
		System.err.print("Loading crawler ... ");
		timeStart = System.currentTimeMillis();
		Crawler crawler = null;
		if (crawlerstr.equals("stdin"))
			crawler = new StdinCrawler(fac, parser, outBucket);
		else if (crawlerstr.equals("corpus"))
			crawler = new CorpusCrawler(fac, parser, outBucket,
					new SentPolarityCorpus(infile));
		else if (crawlerstr.equals("qlikview"))
			crawler = new QlikViewCrawler(fac, parser, outBucket, inputdbname,
					inputdbuser, inputdbpassword, datestart, dateend);
		else if (crawlerstr.equals("mockup"))
			crawler = new MockUpCrawler(fac, parser, outBucket);
		else if (crawlerstr.equals("bug"))
			crawler = new BugCrawler(fac, parser, outBucket, inpath);
		else if (crawlerstr.equals("twitter"))
			crawler = new TwitterCrawler(fac, parser, outBucket);
		else
			throw new Error("Unimplemented crawler " + crawlerstr);
		timeEnd = System.currentTimeMillis();
		printTime();

		return crawler;
	}

	/**
	 * Builds the NLP parser pipeline.
	 *
	 * @param parser   parser kind: stanford, opennlp, lthsrl or null
	 * @param nerfile  regexp NER resource file (Stanford pipeline only)
	 * @param nertype  NER option string; in classify mode it selects which
	 *                 Stanford annotators (regexner/ner) are appended
	 * @param classify true when running in classification mode
	 */
	private ParserPipeline setupParserPipeline(String parser, String nerfile,
			String nertype, boolean classify) throws Exception {
		System.err.println("Loading parser");
		ParserPipeline pipeline;
		if (parser.equals("stanford")) {
			String args = "tokenize, ssplit, lemma, pos";
			if (nertype.contains("regexp") && classify)
				args += ", regexner";
			if (nertype.contains("stanford") && classify)
				args += ", ner";
			pipeline = new StanfordPipeline(args, nerfile);
		} else if (parser.equals("opennlp"))
			pipeline = new OpenNLPPipeline(models);
		else if (parser.equals("lthsrl"))
			pipeline = new LTHSRLPipeline(models + "/lthsrl/");
		else if (parser.equals("null"))
			pipeline = new NullParser();
		else
			throw new Error(parser + " is unimplemented.");
		return pipeline;
	}

	/**
	 * Registers feature extractors that load their vocabularies from cached
	 * files (no corpus pass). Extractors are laid out consecutively in the
	 * feature vector: each one starts at the previous total feature count.
	 */
	private void setupModelCached() throws IOException {
		int offset = 0;
		for (String s : features.split(",\\s*")) {
			if (s.equals("lemma"))
				sdm.addFeatureExtractor(new LemmaExtractor(offset));
			else if (s.equals("pos"))
				sdm.addFeatureExtractor(new POSExtractor(offset));
			else if (s.equals("slength"))
				sdm.addFeatureExtractor(new SentLengthExtractor(offset));
			else if (s.equals("dict"))
				sdm.addFeatureExtractor(new WordPolarityExtractor(offset,
						new WordPolarityCorpus(new File(
								"corpus/polwords/mpqa.txt")), sequence));
			else if (s.equals("reversal"))
				sdm.addFeatureExtractor(new PolarityReversalExtractor(offset));
			else if (s.equals("srl"))
				sdm.addFeatureExtractor(new SRLExtractor(offset));
			else
				throw new Error("Unknown feature " + s);
			// Next extractor starts where the previous one ended.
			offset = sdm.getNbrFeatures();
		}
	}

	/**
	 * Registers feature extractors built from the training corpus and
	 * persists each one ({@code fe.store()}) so classification runs can use
	 * {@link #setupModelCached()} later. Offsets are assigned as in
	 * {@link #setupModelCached()}.
	 */
	private void setupModelTrain() throws IOException {
		int offset = 0;
		for (String s : features.split(",\\s*")) {
			FeatureExtractor fe = null;
			if (s.equals("lemma"))
				fe = new LemmaExtractor(offset, spc, parser);
			else if (s.equals("pos"))
				fe = new POSExtractor(offset, spc, parser);
			else if (s.equals("slength"))
				fe = new SentLengthExtractor(offset, spc.getLongestSentence());
			else if (s.equals("dict"))
				fe = new WordPolarityExtractor(offset, new WordPolarityCorpus(
						new File("corpus/polwords/mpqa.txt")), sequence);
			else if (s.equals("reversal"))
				fe = new PolarityReversalExtractor(offset);
			else if (s.equals("srl"))
				fe = new SRLExtractor(offset, spc, parser);
			else
				throw new Error("Unknown feature " + s);
			sdm.addFeatureExtractor(fe);
			offset = sdm.getNbrFeatures();
			// Persist the extractor's state for later cached loading.
			fe.store();
		}
	}

	/** Starts every configured crawler thread. */
	private void startCrawling() {
		for (Crawler crawler : crawlers) {
			crawler.start();
		}
	}

	/** Starts every translator worker thread. */
	private void startTranslation() throws InterruptedException {
		System.err.println("Starting translation");
		for (Translate translate : translators) {
			translate.start();
		}
	}

	/**
	 * Starts the pipeline workers, then drains the output bucket on the
	 * main thread, handing each finished thread of posts to the output
	 * writer. Returns when the bucket is finished and empty (or on output
	 * failure), then signals the shutdown hook via {@code pass.done()}.
	 *
	 * @param outBucket bucket of classified post threads to write out
	 */
	private void startPipelines(PostBucket outBucket) throws Exception {
		System.err.println("Ready to parse");
		for (Pipeline p : pipelines)
			p.start();

		int i = 0;
		timeStart = System.currentTimeMillis();
		while (!outBucket.finished() || outBucket.size() > 0) {
			List<ForumPost> ft = outBucket.getPost();
			if (ft == null)
				break;
			try {
				output.outputThread(ft);
			} catch(Exception e) {
				// Don't swallow the cause: log it, then stop draining.
				System.err.println("Trouble while writing output: " + e);
				break;
			}
			i++;
			if (i % 100 == 0) {
				timeEnd = System.currentTimeMillis();
				System.err.println(i + " threads written. Done in "
						+ (timeEnd - timeStart) / 1000 + "s");
			}
		}
		pass.done();
		System.err.println("Total number of threads written: " + i);
	}

	/**
	 * Extracts features from the training corpus and writes them in the
	 * requested ML file format.
	 *
	 * @param ml output format: arff, hcrf or libsvm
	 */
	private void startTrain(String ml) throws IOException {
		MLFileFormat mff = null;
		if (ml.equals("arff"))
			mff = new ArffFileFormat("extract", sdm);
		else if (ml.equals("hcrf"))
			mff = new HCRFFileFormat("extract", sdm);
		else if (ml.equals("libsvm"))
			mff = new LibSVMFileFormat("extract", sdm);
		else
			throw new Error("Format unimplemented: " + ml);

		System.err.print("Extracting features ... ");

		timeStart = System.currentTimeMillis();
		sdm.extractFeatures(spc);
		timeEnd = System.currentTimeMillis();
		printTime();

		System.err.println("Writing data");
		mff.writeFeatures();
		mff.writeData();
		mff.close();
	}

	/**
	 * Translation runs only with at least one translator thread and a
	 * translator kind other than "null".
	 */
	private boolean doTranslation() {
		return nbrTranslators > 0 && !translator.contains("null");
	}

	/** Logs the elapsed time between timeStart and timeEnd to stderr. */
	private void printTime() {
		System.err.printf("[%.1f sec]\n", (timeEnd - timeStart) / 1000.0);
	}
}
