/*
 * Copyright (C) <2009>  <Matthew Dickinson>
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
*/

package com.pulsarwebdevelopment.concordance.plugins;

import java.io.File;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Set;

import com.pulsarwebdevelopment.concordance.Concordance;
import com.pulsarwebdevelopment.concordance.Tools;

/**
 * Processes a text file and replaces words found in the chosen concordance's
 * unique / pair / triple word sets with blanked-out placeholders, e.g. a
 * matched unique word "cat" becomes {@code <___>}. Which of the three sets
 * are applied is chosen by the user via check boxes on the plugin screen.
 * @author Matthew Dickinson
 * Last updated: 11/9/2008
 */
public class UniqueBlanker extends UniqueX{

	private Concordance uniqueList;
	private Concordance pairList;
	private Concordance tripleList;
	// Sets give O(1) membership tests in handleWord(); the previous
	// ArrayList.contains() was an O(n) scan per processed word.
	private Set<String> uniqueWords;
	private Set<String> pairWords;
	private Set<String> tripleWords;
	boolean wantUnique;
	boolean wantPairs;
	boolean wantTriples;

	// NOTE(review): @Override was commented out in the original; restore it
	// if UniqueX does declare go(Object[]) — confirm against the superclass.
	//@Override
	/**
	 * Entry point called with the values gathered by plug():
	 * args[0] = concordance index (Integer), args[1] = input file,
	 * args[2] = output file, args[3..5] = Booleans selecting the
	 * unique / pair / triple word sets. Loads the requested word sets
	 * and then processes the input file via parseFile().
	 */
	public void go(Object[] args) {
		int concIndex = (Integer) args[0];
		readFile = (File) args[1];
		exportFile = (File) args[2];
		wantUnique = (Boolean) args[3];
		wantPairs = (Boolean) args[4];
		wantTriples = (Boolean) args[5];

		// Only build the word sets the user asked for; each is copied into
		// a HashSet for fast lookups during parsing.
		if(wantUnique){
			uniqueList = new Concordance(concIndex);
			uniqueWords = new HashSet<String>(uniqueList.getWordSet(1));
		}
		if(wantPairs){
			pairList = new Concordance(concIndex);
			pairWords = new HashSet<String>(pairList.getWordSet(2));
		}
		if(wantTriples){
			tripleList = new Concordance(concIndex);
			tripleWords = new HashSet<String>(tripleList.getWordSet(3));
		}

		parseFile();
	}

	// NOTE(review): @Override was commented out in the original; restore it
	// if UniqueX does declare plug(PluginScreen) — confirm against the superclass.
	//@Override
	/**
	 * Builds the plugin's configuration screen: concordance chooser,
	 * input/output file browsers, and check boxes for the three word sets.
	 * The widget order must match the args[] layout expected by go().
	 */
	public void plug(PluginScreen screen) {
		screen.setTitleText("Unique Blanker", "Removes unique words from a document");
		screen.giveConcList("Choose the concordance to use.");
		screen.giveFileBrowser(FileBrowser.FileType.TEXT, "Choose the file to process.");
		screen.giveFileBrowser(FileBrowser.FileType.TEXT, "Choose the file to output results to.");
		String[] values = {"Unique Words", "Word Pairs", "Word Triples"};
		screen.giveCheckBoxes("Choose which set(s) of words you want to have displayed", values);

	}

	/** Text emitted before the whole output document: a single tab. */
	@Override
	public String getHeader() {
		return "\t";
	}

	/** Text emitted after the whole output document: nothing. */
	@Override
	public String getFooter() {
		return "";
	}

	/** Text emitted at the start of each paragraph: a single tab. */
	// Method name spelling ("Paragrap") is fixed by the UniqueX superclass.
	@Override
	public String getParagrapStart() {
		return "\t";
	}

	/** Text emitted at the end of each paragraph: a newline. */
	@Override
	public String getParagrapEnd() {
		return "\n";
	}

	/**
	 * Called once per word of the input file. Returns the word unchanged,
	 * or a blanked placeholder if the (capitalized) word appears in one of
	 * the selected word sets. Bracket style encodes which set matched:
	 * {...} triples, [...] pairs, <...> uniques.
	 */
	@Override
	public String handleWord(String word){
		String capWord = Tools.capitalize(word);

		//run these checks backwards. As triples are those that appear 3 times,
		// these words should show up more often than words pairs, etc...
		if(wantTriples && tripleWords.contains(capWord)){
			return getBlanks(word, '{', '}');
		}
		if(wantPairs && pairWords.contains(capWord)){
			return getBlanks (word, '[', ']');
		}
		if(wantUnique && uniqueWords.contains(capWord)){
			return getBlanks (word, '<', '>');
		}

		return word;
	}

	/**
	 * Builds a placeholder of the same length as {@code word}: one
	 * underscore per character, wrapped in the given bracket characters.
	 */
	private String getBlanks(String word, char openBlank, char closeBlank) {
		// StringBuilder (unsynchronized) replaces StringBuffer; presized to
		// the exact result length: word + two brackets.
		StringBuilder blanked = new StringBuilder(word.length() + 2);
		blanked.append(openBlank);
		for(int i = 0; i < word.length(); i++){
			blanked.append('_');
		}
		blanked.append(closeBlank);
		return blanked.toString();
	}

}
