import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.StringTokenizer;

/**
 * Loads training documents from a directory and groups them by integer
 * class label. Each training file is expected to begin with the document's
 * class label as its first token, followed by the document text.
 *
 * <p>Not thread-safe: loading mutates internal state without synchronization.
 */
public class DocumentsLoader {
	// Documents grouped by their class label.
	private final Map<Integer, List<Document>> documents;
	// The fixed set of class labels: 0 .. c-1.
	private final int[] classes;
	// Normalizer used to derive each document's term set from its raw tokens.
	private final Normalizer normalizer;
	// Total number of training documents loaded so far.
	private int docsCount;

	/**
	 * @param c          number of classes; labels are assumed to be 0 .. c-1
	 * @param normalizer normalizer applied to every document's tokens
	 */
	public DocumentsLoader(int c, Normalizer normalizer) {
		classes = new int[c];
		for (int i = 0; i < c; i++) {
			classes[i] = i;
		}
		this.normalizer = normalizer;
		documents = new HashMap<Integer, List<Document>>();
		docsCount = 0;
	}

	/**
	 * Returns the number of training documents loaded for the given class,
	 * or 0 if none have been loaded for it.
	 *
	 * <p>Bug fix: the previous implementation ignored {@code clazz} and
	 * always returned the total document count; use the no-arg overload
	 * {@link #trainingDocumentsCount()} for the total.
	 */
	public int trainingDocumentsCount(int clazz) {
		List<Document> docs = documents.get(clazz);
		return docs == null ? 0 : docs.size();
	}

	/** Returns the total number of training documents loaded so far. */
	public int trainingDocumentsCount() {
		return docsCount;
	}

	/** Returns the array of class labels (the internal array, not a copy). */
	public int[] getClasses() {
		return classes;
	}

	/** Returns all loaded training documents, across every class. */
	public List<Document> getTrainingDocuments() {
		List<Document> docs = new ArrayList<Document>();
		for (int i : classes) {
			List<Document> classDocs = documents.get(i);
			// Guard against classes with no documents yet: the original
			// code threw a NullPointerException via addAll(null) here.
			if (classDocs != null) {
				docs.addAll(classDocs);
			}
		}
		return docs;
	}

	/**
	 * Returns the training documents of the given class, or an empty list
	 * if none were loaded (the original returned null for unseen classes).
	 */
	public List<Document> getTrainingDocuments(int clazz) {
		List<Document> docs = documents.get(clazz);
		return docs == null ? new ArrayList<Document>() : docs;
	}

	/**
	 * Loads every file in the given directory as a training document. The
	 * first token of each file is parsed as its integer class label; the
	 * remaining tokens form the document body.
	 *
	 * @param path directory containing one training file per document
	 * @throws IOException if the path is not a readable directory or a file
	 *                     cannot be read
	 */
	public void loadTrainingDocuments(String path) throws IOException {
		File[] files = (new File(path)).listFiles();
		// listFiles() returns null when the path does not exist or is not a
		// directory; the original code crashed with a NullPointerException.
		if (files == null) {
			throw new IOException("Not a readable directory: " + path);
		}

		for (File f : files) {
			docsCount++;

			List<String> tokens = tokenize(f);
			// First token is the class label; the rest is the document text.
			int clazz = Integer.parseInt(tokens.get(0));
			tokens.remove(0);
			Set<String> terms = normalizer.getTerms(tokens);

			Document doc = new Document(terms, tokens, clazz);

			List<Document> docs = documents.get(clazz);
			if (docs == null) {
				docs = new ArrayList<Document>();
				documents.put(clazz, docs);
			}
			docs.add(doc);
		}
	}

	/**
	 * Reads the file line by line and splits it into tokens on whitespace
	 * and simple punctuation.
	 *
	 * <p>NOTE(review): FileReader uses the platform default charset here —
	 * confirm whether training files are guaranteed to match it.
	 *
	 * @throws IOException if the file cannot be read
	 */
	public static List<String> tokenize(File f) throws IOException {
		BufferedReader r = new BufferedReader(new FileReader(f));
		List<String> tokens = new ArrayList<String>();
		try {
			String str;
			while ((str = r.readLine()) != null) {
				StringTokenizer tokenizer = new StringTokenizer(str, " ,,;:\"\'");
				while (tokenizer.hasMoreTokens()) {
					tokens.add(tokenizer.nextToken());
				}
			}
		} finally {
			// Fix resource leak: the original never closed the reader when
			// readLine() threw mid-loop.
			r.close();
		}
		return tokens;
	}

	/**
	 * Builds an unlabeled Document (class label -1) from a file, e.g. for
	 * classifying a new document with the given normalizer.
	 */
	public static Document docFromFile(File f, Normalizer normalizer) throws IOException {
		List<String> tokens = tokenize(f);
		Set<String> terms = normalizer.getTerms(tokens);
		return new Document(terms, tokens, -1);
	}
}
