/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package org.essilab.analyzer.application;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Map;
import org.essilab.analyzer.analyzer.AnalyzedTerm;
import org.essilab.analyzer.util.Analyzer;
import org.essilab.analyzer.util.Statistic;

/**
 * Command-line application that extracts the main tokens from a text file.
 *
 * <p>The input file may be given as the first command-line argument; when no
 * argument is supplied, the historical default {@code ./Test_Text.txt} is used.
 * Each processing step prints its cumulative elapsed time in milliseconds.
 *
 * @author Mohamed-Amine Chouchene, Ahcène Idinarere, Vincent Nock, Alain
 * Janin-Manificat
 */
public class ApplicationAnalyzer {

    /** Input file used when no path is passed on the command line. */
    private static final String DEFAULT_PATH = "./Test_Text.txt";

    /**
     * Runs the full analysis pipeline: clean text, count words, build terms,
     * link them, weight them, then print every root term with its weight.
     *
     * @param args optional; {@code args[0]} is the path of the text file to
     *             analyze (defaults to {@link #DEFAULT_PATH})
     * @throws IOException if the input file cannot be read
     */
    public static void main(String[] args) throws IOException {

        // Previously the path was hard-coded (args were ignored); accept an
        // optional argument while keeping the old default for compatibility.
        String path = (args.length > 0) ? args[0] : DEFAULT_PATH;

        // currentTimeMillis() returns a long; storing it in a double made the
        // elapsed times print with a spurious ".0" suffix.
        long startTime = System.currentTimeMillis();

        Analyzer a = new Analyzer();

        // Step 1: strip useless tokens (stop words etc.) from the raw text.
        String cleanText = a.cleanUselessTokens(a.readFromFile(path));
        logStep(1, startTime);

        // Step 2: word -> occurrence count, ordered by occurrences in the text.
        Map<String, Integer> words = Statistic.evaluateQuantityTextFromString(cleanText);
        logStep(2, startTime);

        // Step 3: wrap each counted word in an AnalyzedTerm.
        ArrayList<AnalyzedTerm> rootTerms = a.createTermsList(words);
        logStep(3, startTime);

        // Step 4: resolve links between all AnalyzedTerms of the text.
        a.loadLinks(rootTerms);
        logStep(4, startTime);

        // Weighting: apply the occurrence factor, then the word-frequency factor.
        a.applyOccurrences(rootTerms);
        a.applyWordsFrequency(rootTerms);

        // Step 5: sort both root terms and linked terms by final weight.
        a.sortTermsByWeight(rootTerms);
        a.sortTermsByWeight(a.linkedAnalyzedTerms);
        logStep(5, startTime);

        System.out.println("Number of linked words : "
                + a.linkedAnalyzedTerms.size());

        // Print every root term with its computed weight.
        for (AnalyzedTerm analyzedTerm : rootTerms) {
            System.out.println(analyzedTerm.getTerm().getTitle() + " : "
                    + analyzedTerm.getWeight());
        }
    }

    /**
     * Prints the elapsed time in milliseconds since {@code start} for the
     * given pipeline step.
     *
     * @param step  1-based step number
     * @param start timestamp from {@link System#currentTimeMillis()} taken at
     *              the beginning of the run
     */
    private static void logStep(int step, long start) {
        System.out.println("Step " + step + " : "
                + (System.currentTimeMillis() - start));
    }
}
