package hk.polyu;

import java.io.*;
import java.util.*;

/**
 * This class contains extended document information, including the content of
 * the document and its token set.
 * @author hezongjian
 *
 */
public class DocumentInfo extends BasicDocumentInfo{
	/**
	 * Raw text content of the file. Released (set to null) after a successful
	 * call to {@link #parse(boolean, Vector)} to save memory.
	 */
	String		mContent;
	
	/**
	 * Per-document inverted index: key = token, value = 0-based positions of
	 * that token, counted over the tokens that survive empty-token and
	 * stop-word filtering.
	 */
	HashMap<String, Vector<Integer>> mTokens;

	/**
	 * Creates a document record and eagerly reads the whole file into memory.
	 * On an I/O failure the error is reported and the content falls back to
	 * the empty string so that a later {@code parse()} cannot NPE.
	 *
	 * @param id 	Integer ID of the document
	 * @param f		File descriptor passed by the directory listing.
	 */
	public DocumentInfo(int id, File f) {
		this.mID = id;
		this.mName = f.getName();
		
		try {
			this.mPath = f.getCanonicalPath();
			this.mContent = readFully(f);
		} catch (IOException e) {
			e.printStackTrace();
			// Fall back to an empty document instead of leaving mContent null,
			// which would crash parse() later.
			this.mContent = "";
		}
	}
	
	/**
	 * Reads the entire file as text, looping until EOF.
	 * The original code sized a buffer by f.length() (a BYTE count) and issued
	 * a single read(); with multi-byte encodings that pads the string with
	 * '\u0000', and read() may legally return fewer chars than requested.
	 * The platform default charset is kept for compatibility with the
	 * original FileReader behaviour.
	 *
	 * @param f file to read
	 * @return the complete text content of the file
	 * @throws IOException if the file cannot be opened or read
	 */
	private static String readFully(File f) throws IOException {
		StringBuilder sb = new StringBuilder((int) Math.min(Integer.MAX_VALUE, Math.max(16L, f.length())));
		try (Reader reader = new BufferedReader(new FileReader(f))) {
			char[] buf = new char[8192];
			int n;
			while ((n = reader.read(buf)) != -1) {
				sb.append(buf, 0, n);
			}
		}
		return sb.toString();
	}
	
	/**
	 * Get how many tokens are there in the document.
	 * NOTE(review): this is the raw count produced by splitting on "\\W",
	 * which includes empty fragments and stop words (see parse()).
	 * @return Number of tokens.
	 */
	public int getLength() {
		return this.mLength;
	}
	
	/**
	 * Parses the document content to generate all tokens and their positions.
	 * Tokens are lower-cased; empty fragments and stop-list words are skipped
	 * (and do not advance the position counter, preserving the original
	 * behaviour). The raw content is released afterwards, so this method is
	 * effective only once per instance.
	 *
	 * @param bStammer Whether we should stem the terms or not.
	 * @param stopList Specify the stop words (may be null for no stop list).
	 * @return true on success; false if the content has already been released.
	 */
	public boolean parse(boolean bStammer, Vector<String> stopList) {
		// Content is null after a previous parse() released it; the original
		// code would NPE here — report failure instead.
		if (this.mContent == null) {
			return false;
		}
		
		this.mTokens = new HashMap<String, Vector<Integer>>();
		
		// Split on single non-word characters. "\\W" (not "\\W+") can yield
		// empty strings for consecutive separators; they are skipped below
		// but still counted in mLength, matching the original behaviour.
		String [] tokens = this.mContent.split("\\W");
		this.mLength = tokens.length;
		
		int nTokenIndex = 0;  // position among the tokens actually kept
		for (String s: tokens) {
			if (s.isEmpty())
				continue;
			s = s.toLowerCase();
			
			// Skip stop words.
			if (stopList != null && stopList.contains(s))
				continue;
			
			// Optionally reduce the token to its stem.
			if (bStammer) {
				Stemmer stm = new Stemmer();
				char [] buf = s.toCharArray();
				stm.add(buf, buf.length);
				stm.stem();
				s = stm.toString();
			}
			
			// Record this occurrence, creating the posting list on first
			// sight (single map lookup instead of containsKey + get).
			Vector<Integer> positions = this.mTokens.get(s);
			if (positions == null) {
				positions = new Vector<Integer>();
				this.mTokens.put(s, positions);
			}
			positions.add(nTokenIndex);
			
			nTokenIndex++;
		}
		
		// Release the raw text to save memory.
		this.mContent = null;
		
		return true;
	}
	
}
