package lexical;

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.StringTokenizer;

/**
 *
 * @author 720125
 */
public class Tokenizer {

    /** Unique tokens (delimiter characters included) in first-seen order. */
    private final List<String> tokens = new ArrayList<>();
    /** Path of the file this tokenizer was built from. */
    private final String file;
    /** Characters that split the input; they are also emitted as tokens. */
    private final String delimiters = " .;('\"";

    /**
     * Reads the whole file line by line and collects its unique tokens.
     *
     * @param file path of the file to tokenize
     * @throws IOException if the file cannot be opened or read
     */
    public Tokenizer(String file) throws IOException {
        this.file = file;
        // try-with-resources closes the reader even if readLine() throws.
        // readLine() == null is the reliable end-of-stream test; the old
        // in.ready() check only reports whether a read would block, not EOF.
        // NOTE(review): FileReader uses the platform default charset —
        // confirm input files match it, or switch to an explicit charset.
        try (BufferedReader in = new BufferedReader(new FileReader(file))) {
            String str;
            while ((str = in.readLine()) != null) {
                process(str);
            }
        }
    }

    /**
     * Splits one line on {@link #delimiters} (keeping the delimiters as
     * tokens) and appends every token not seen before.
     * Tokens are appended at the end so first-seen order is preserved across
     * lines; the old per-line counter restarted at 0 and wrongly inserted
     * later lines' tokens at the front of the list.
     */
    private void process(String str) {
        StringTokenizer st = new StringTokenizer(str, delimiters, true);
        while (st.hasMoreTokens()) {
            String token = st.nextToken();
            if (!registrado(token)) {
                tokens.add(token);
            }
        }
    }

    /** @return true if {@code token} has already been collected */
    private boolean registrado(String token) {
        return tokens.contains(token);
    }

    /**
     * @return a defensive copy of the unique tokens in first-seen order
     */
    public List<String> getTokens() {
        return new ArrayList<>(tokens);
    }

    /** Prints each token preceded by its zero-based position in the list. */
    public void print() {
        // Index loop replaces the per-element indexOf() call, which was an
        // accidental O(n^2) scan (indexOf == position since tokens are unique).
        for (int i = 0; i < tokens.size(); i++) {
            System.out.println(i + "\t" + tokens.get(i));
        }
    }
}
