/*
 * @(#)SemanticSimilarity.java	0.5 10/01/07
 *
 * Copyright 2007 Fabio Gasparetti. All rights reserved.
 */

package org.tabularium.text.nlp;

import java.io.BufferedReader;
import java.io.FileReader;
import java.util.*;
import java.io.*;

import org.tabularium.text.TermWeightVector;
import org.tabularium.text.TextSimilarity;
import org.tabularium.text.nlp.Stemmer;
import org.tabularium.text.nlp.wordnet.*;
// debug
import org.tabularium.text.nlp.wordnet.jwi.*;

/**
 * Extends the Resnik similarity function by also considering the similarity
 * between the entities collected by the internal recognizer.
 * <p>
 * The main (and perhaps the only) entities taken into consideration by GATE are:
 * <ul>
 * <li>Date
 * <li>FirstPerson
 * <li>JobTitle
 * <li>Location
 * <li>Money
 * <li>Organization
 * <li>Person
 * <li>Sentence
 * <li>SpaceToken
 * <li>Title
 * <li>Unknown
 * </ul> 
 * 
 * @todo Verify that collocations are properly managed.
 * @todo weight the entity classes 
 * 
 * @author Fabio Gasparetti
 * @version 0.5, 10/01/07
 */
public class SemanticSimilarity implements TextSimilarity {

	/** Entity-class identifiers; currently unused by the matching code. */
	public static final long LOCATION = 1;

	public static final long ORGANIZATION = 2;

	public static final long PERSON = 3;

	/** Tagger used to POS-tag and entity-annotate the input texts. */
	TextTagger tagger = null;

	/** WordNet interface used for synset lookups. */
	WordNet wordNet = null;

	/** Resnik information-content similarity between synsets/words. */
	Resnik resnik = null;

	/** Resnik-based word-sense disambiguator (also extracts the nouns). */
	ResnikDisambiguation disambiguation = null;

	// NOTE(review): stored by setEntities() but never read; the entity
	// classes handled by similarity() are hard-coded (Location, Person,
	// Organization) -- see the "weight the entity classes" todo.
	protected String[] entityNames = new String[] {};

	protected double[] entityWeights = new double[] {};

	/**
	 * Creates the similarity measure.
	 * 
	 * @param t tagger used to annotate the texts
	 * @param wn WordNet interface used for synset lookups
	 * @param stem stemmer (not used directly here; kept for signature
	 *            compatibility -- the Resnik instance is built by the caller)
	 * @param c collocations dictionary handed to the disambiguator
	 * @param r Resnik similarity implementation
	 */
	public SemanticSimilarity(TextTagger t, WordNet wn, Stemmer stem, Collocations c, Resnik r) {
		tagger = t;
		wordNet = wn;
		resnik = r;

		disambiguation = new ResnikDisambiguation(t, wn, c, r);
	}

	/**
	 * Sets the entity classes and their weights.
	 * NOTE(review): the values are stored but not yet used by the
	 * similarity computation.
	 */
	public void setEntities(String[] entities, double[] weights) {
		entityNames = entities;
		entityWeights = weights;
	}

	/**
	 * Not implemented for term-weight vectors.
	 * 
	 * @throws RuntimeException always
	 */
	public double similarity(TermWeightVector v1, TermWeightVector v2) {
		throw new RuntimeException("feature not yet implemented");
	}

	/**
	 * Returns a value in the [0, 1] range that represents how much two texts
	 * are similar (-1 signals a tagger failure).
	 * 
	 * The score combines the Resnik similarity of the best-matching noun
	 * pairs with exact/synset matches of the Location, Person and
	 * Organization entities recognized by the tagger, normalized by the
	 * geometric mean of the two noun+entity counts.
	 * 
	 * @param text1 first text
	 * @param text2 second text
	 * @return similarity in [0, 1], or -1 if tagging failed
	 */
	public double similarity(String text1, String text2) {
		// The ASCII-stripped copies are used only for the emptiness test;
		// the tagger still receives the original texts.
		String text1a = text1.replaceAll("[^\\x00-\\x7F]", "");
		String text2a = text2.replaceAll("[^\\x00-\\x7F]", "");
		if ((text1a.trim().length() == 0) || (text2a.trim().length() == 0))
			return 0d;
		Tag[] tags1;
		Tag[] tags2;
		try {
			tags1 = tagger.tag(text1);
			tags2 = tagger.tag(text2);
		} catch (Exception ex) {
			ex.printStackTrace();
			return -1d; // error marker, outside the documented [0, 1] range
		}
		// retrieve nouns/collocations
		Tag[] nouns1 = disambiguation.nouns(tags1);
		Tag[] nouns2 = disambiguation.nouns(tags2);

		// FIX: the original tested nouns1.length twice, so an empty second
		// noun list slipped through this guard.
		if (nouns1.length == 0 || nouns2.length == 0)
			return 0d;

		// iterate through the longer list in the pair matching below
		if (nouns1.length < nouns2.length) {
			Tag[] temp = nouns1;
			nouns1 = nouns2;
			nouns2 = temp;
		}
		double ret = bestMatchSum(nouns1, nouns2);

		int den1 = nouns1.length;
		int den2 = nouns2.length;
		Tag[] entities1;
		Tag[] entities2;

		entities1 = TextTagger.filter(tags1, "Location");
		entities2 = TextTagger.filter(tags2, "Location");
		den1 += entities1.length;
		den2 += entities2.length;
		ret += matchEntity(entities1, entities2);

		entities1 = TextTagger.filter(tags1, "Person");
		entities2 = TextTagger.filter(tags2, "Person");
		den1 += entities1.length;
		den2 += entities2.length;
		ret += matchEntity(entities1, entities2);

		entities1 = TextTagger.filter(tags1, "Organization");
		entities2 = TextTagger.filter(tags2, "Organization");
		den1 += entities1.length;
		den2 += entities2.length;
		ret += matchEntity(entities1, entities2);

		// cosine-like normalization; the cast avoids int overflow of the
		// product for very long texts
		ret /= Math.sqrt((double) den1 * den2);

		return ret;
	}

	/**
	 * For every noun of the first list, finds the noun of the second list
	 * with the highest Resnik similarity and accumulates the normalized
	 * weight of that best pair. Pairs for which the Resnik measure returns
	 * null are skipped.
	 */
	private double bestMatchSum(Tag[] nouns1, Tag[] nouns2) {
		double sum = 0d;
		for (int i = 0; i < nouns1.length; i++) {
			WeightedSynset best = null;
			for (int j = 0; j < nouns2.length; j++) {
				WeightedSynset ws = resnik.resnikSimilarity(
						nouns1[i].getContent(), nouns2[j].getContent());
				if (ws == null)
					continue;
				if (best == null || best.getWeight() < ws.getWeight())
					best = ws;
			}
			if (best != null)
				sum += resnik.normalize(best.getWeight());
		}
		return sum;
	}

	/**
	 * Useful to match two lists of entities, e.g., lists of organizations,
	 * lists of locations, etc. An entity of the first list counts as a match
	 * when it is lexically identical to an entity of the second list, or when
	 * one of its noun synsets also appears among the synsets of the second
	 * list.
	 * 
	 * The tags must be composed of nouns.
	 * 
	 * @return the number of matched entities
	 */
	protected int matchEntity(Tag[] tags1, Tag[] tags2) {
		int matches = 0;
		Map<String, Synset[]> tagSynsets1 = new HashMap<String, Synset[]>();
		Set<Synset> tagSynsets2 = new HashSet<Synset>();
		Set<String> tagNouns1 = new HashSet<String>();
		Set<String> tagNouns2 = new HashSet<String>();

		// collect the synsets for each entity if they exist
		for (int i = 0; i < tags1.length; i++) {
			String s = Collocations.removeUselessBlanks(tags1[i].getContent());
			tagSynsets1.put(s, wordNet.getSynsets(s, PartOfSpeech.NOUN));
			tagNouns1.add(s);
		}
		for (int i = 0; i < tags2.length; i++) {
			String s = Collocations.removeUselessBlanks(tags2[i].getContent());
			Synset[] ss = wordNet.getSynsets(s, PartOfSpeech.NOUN);
			// FIX: the original added the whole Synset[] array to the set and
			// later queried contains() with a single Synset, so the
			// synset-based comparison could never succeed. Individual synsets
			// are stored instead.
			// NOTE(review): assumes Synset defines equals/hashCode; otherwise
			// membership falls back to reference identity -- confirm.
			if (ss != null) {
				for (int j = 0; j < ss.length; j++)
					tagSynsets2.add(ss[j]);
			}
			tagNouns2.add(s);
		}
		// (the original swapped tags1/tags2 at this point when tags2 was
		// longer, but the swap happened after the lookup tables above were
		// already built and therefore had no effect; removed as dead code)
		Iterator<String> iter = tagNouns1.iterator();
		while (iter.hasNext()) {
			String s = iter.next();
			// straight lexical comparison
			if (tagNouns2.contains(s)) {
				matches++;
				continue;
			}
			// synset-based comparison
			Synset[] ss = tagSynsets1.get(s);
			if (ss == null)
				continue;
			for (int i = 0; i < ss.length; i++) {
				if (tagSynsets2.contains(ss[i])) {
					matches++;
					break;
				}
			}
		}
		return matches;
	}

	/**
	 * Returns the synsets contained in both arrays.
	 * NOTE(review): elements are compared by reference (==), and the TreeSet
	 * requires Synset to be Comparable at runtime -- confirm both against the
	 * Synset implementation.
	 */
	protected Synset[] synsetIntersection(Synset[] ss1, Synset[] ss2) {
		Set<Synset> ret = new TreeSet<Synset>();
		for (int i = 0; i < ss1.length; i++) {
			for (int j = 0; j < ss2.length; j++) {
				if (ss1[i] == ss2[j]) {
					ret.add(ss1[i]);
					break;
				}
			}
		}
		return ret.toArray(new Synset[] {});
	}

	/**
	 * Similarity measure based on the Resnik disambiguation algorithm.
	 * 
	 * In the 1st step, the two texts are parsed and the most representative
	 * synsets are extracted according to the Resnik disambiguation algorithm.
	 * In the 2nd step, for each synset of the longer weighted set the
	 * best-matching synset of the other set is found; the product of the two
	 * disambiguation weights and of the pair's Resnik similarity is
	 * accumulated and finally normalized by the Euclidean norms of the two
	 * weight vectors and of the best-pair similarities.
	 * 
	 * @param text1 first text
	 * @param text2 second text
	 * @return the normalized similarity, or 0 when any norm is 0
	 */
	public double resnikSimilarity(String text1, String text2) {
		WeightedSynset[] wss1 = disambiguation.disambiguate(text1);
		WeightedSynset[] wss2 = disambiguation.disambiguate(text2);
		int i, j, jmax = 0;
		double num = 0d;
		double den1 = 0d, den2 = 0d, denResnik = 0d;
		WeightedSynset ws, wsmax = null;
		// iterate through the longer sequence
		if (wss1.length < wss2.length) {
			WeightedSynset[] temp = wss1;
			wss1 = wss2;
			wss2 = temp;
		}
		for (i = 0; i < wss1.length; i++) {
			// find the pair with the maximum Resnik similarity
			for (j = 0; j < wss2.length; j++) {
				ws = resnik.resnikSimilarity(wss1[i].getSynset(), wss2[j]
						.getSynset());
				if (ws == null)
					continue;

				if (wsmax == null || wsmax.getWeight() < ws.getWeight()) {
					wsmax = ws;
					jmax = j;
				}
			}
			if (wsmax != null) {
				num += wss1[i].getWeight() * wss2[jmax].getWeight()
						* wsmax.getWeight();
				denResnik += wsmax.getWeight() * wsmax.getWeight();
				wsmax = null;
			}
		}
		// denominators (Euclidean norms)
		for (i = 0; i < wss1.length; i++)
			den1 += wss1[i].getWeight() * wss1[i].getWeight();
		for (i = 0; i < wss2.length; i++)
			den2 += wss2[i].getWeight() * wss2[i].getWeight();
		den1 = Math.sqrt(den1);
		den2 = Math.sqrt(den2);
		denResnik = Math.sqrt(denResnik);

		if ((den1 > 0) && (den2 > 0) && (denResnik > 0))
			return num / (den1 * den2 * denResnik);
		return 0d;
	}

	/**
	 * Noun-based variant of the Resnik similarity: the texts are tagged,
	 * their nouns extracted, and each noun of the longer list is matched with
	 * its best Resnik counterpart in the shorter one. The accumulated
	 * normalized weights are divided by sqrt(|nouns1| * |nouns2|).
	 * 
	 * @param text1 first text
	 * @param text2 second text
	 * @return similarity, or -1 if tagging failed
	 */
	public double resnikSimilarity2(String text1, String text2) {
		Tag[] tags1;
		Tag[] tags2;
		try {
			tags1 = tagger.tag(text1);
			tags2 = tagger.tag(text2);
		} catch (Exception ex) {
			ex.printStackTrace();
			return -1d;
		}
		// retrieve nouns
		Tag[] nouns1 = disambiguation.nouns(tags1);
		Tag[] nouns2 = disambiguation.nouns(tags2);

		// FIX: the original tested nouns1.length twice (nouns2 was ignored)
		if (nouns1.length == 0 || nouns2.length == 0)
			return 0d;

		if (nouns1.length < nouns2.length) {
			Tag[] temp = nouns1;
			nouns1 = nouns2;
			nouns2 = temp;
		}
		return bestMatchSum(nouns1, nouns2)
				/ Math.sqrt((double) nouns1.length * nouns2.length);
	}

	/**
	 * Variant of {@link #resnikSimilarity2(String, String)} that normalizes
	 * by the length of the longer noun list only.
	 * 
	 * @param text1 first text
	 * @param text2 second text
	 * @return similarity, or -1 if tagging failed
	 */
	public double resnikSimilarity3(String text1, String text2) {
		Tag[] tags1;
		Tag[] tags2;
		try {
			tags1 = tagger.tag(text1);
			tags2 = tagger.tag(text2);
		} catch (Exception ex) {
			ex.printStackTrace();
			return -1d;
		}
		// retrieve nouns
		Tag[] nouns1 = disambiguation.nouns(tags1);
		Tag[] nouns2 = disambiguation.nouns(tags2);

		if (nouns1.length < nouns2.length) {
			Tag[] temp = nouns1;
			nouns1 = nouns2;
			nouns2 = temp;
		}
		// (the original also ran disambiguate() on both texts and looked up
		// synsets per noun pair, but never used either result; that dead
		// work has been removed)
		double ret = bestMatchSum(nouns1, nouns2);
		return nouns1.length > 0 ? ret / nouns1.length : 0d;
	}

	/**
	 * Weighted variant built on the disambiguated synsets: each best-pair
	 * similarity is weighted by the average of the two disambiguation
	 * weights, and the total is normalized by the mean weight.
	 * 
	 * NOTE(review): despite the name this uses the Resnik synset similarity,
	 * not the Leacock-Chodorow measure, and it iterates the SHORTER synset
	 * list (the opposite of the other methods) -- confirm both are intended.
	 */
	public double leacockSimilarity(String text1, String text2) {
		WeightedSynset[] wss1 = disambiguation.disambiguate(text1);
		WeightedSynset[] wss2 = disambiguation.disambiguate(text2);
		int i, j, jmax = 0;
		double w, wtot = 0d, ret = 0d;
		WeightedSynset ws, wsmax = null;
		if (wss1.length > wss2.length) {
			WeightedSynset[] temp = wss1;
			wss1 = wss2;
			wss2 = temp;
		}
		for (i = 0; i < wss1.length; i++) {
			for (j = 0; j < wss2.length; j++) {
				ws = resnik.resnikSimilarity(wss1[i].getSynset(), wss2[j]
						.getSynset());
				if (ws == null)
					continue;

				if (wsmax == null || wsmax.getWeight() < ws.getWeight()) {
					wsmax = ws;
					jmax = j;
				}
			}
			if (wsmax != null) {
				// weight the pair similarity by the mean disambiguation weight
				w = (wss1[i].getWeight() + wss2[jmax].getWeight()) / 2d;
				ret += w * wsmax.getWeight();
				wtot += w;
				wsmax = null;
			}
		}
		wtot /= wss1.length;
		return wtot > 0d ? ret / wtot : 0d;
	}

	/**
	 * True if the given synset is hyponym of synsetId.
	 * 
	 * NOTE(review): not implemented -- the holonym lookups below compute word
	 * lists that are never used (leftover debug scaffolding) and the method
	 * always returns false.
	 */
	protected boolean hyponymOf(Synset s, String synsetId) {
		Synset[] ss = wordNet.getRelatedSynsets(s, Relationship.HOLONYM_MEMBER);
		for (int i = 0; i < ss.length; i++) {
			String[] words = wordNet.getWords(ss[i]);
		}
		ss = wordNet.getRelatedSynsets(s, Relationship.HOLONYM_PART);
		for (int i = 0; i < ss.length; i++) {
			String[] words = wordNet.getWords(ss[i]);
		}
		ss = wordNet.getRelatedSynsets(s, Relationship.HOLONYM_SUBSTANCE);
		for (int i = 0; i < ss.length; i++) {
			String[] words = wordNet.getWords(ss[i]);
		}
		return false;
	}

	/**
	 * Demo driver: initializes GATE, WordNet, the BNC collection frequencies
	 * and the collocations dictionary, then prints the resnikSimilarity and
	 * similarity scores for a few sample text pairs. Resource locations are
	 * taken from the gate.home and user.home system properties.
	 */
	public static void main(String args[]) throws Exception {
		try {
			GateTagger tagger = new GateTagger();
			File fn = new File(System.getProperty("gate.home"));
			System.out.println("Tagger: init");
			tagger.init(fn.toString());
			JwiWordNet wn = new JwiWordNet();
			fn = new File(System.getProperty("user.home"),
					"/projects/cues/WordNet-3.0/dict");
			System.out.println("WordNet: init");
			wn.init(fn.toString());
			ArrayCollectionFrequency f = new ArrayCollectionFrequency();
			BncParser p = new BncParser();
			fn = new File(System.getProperty("user.home"),
					"/projects/cues/idfs/all.num.o5");
			System.out.println("CollectionFrequency: load");
			f.load(fn.toString(), p);
			Stemmer stem = new JwiStemmer();
			System.out.println("Semantic Similarity");
			Collocations coll = new Collocations();
			coll.load("collocations.txt");

			System.out.println("Resnik init");
			Resnik r = new Resnik(wn, stem, f);
			r.readSynsetProbs("synsetFreqs.txt");

			SemanticSimilarity sim = new SemanticSimilarity(tagger, wn, stem, coll, r);
			System.out.println("Semantic Similarity: init");
			System.out.println("\n");
			double d = sim
					.resnikSimilarity(
							"Felix lands as a Cat 5: Hurricane Felix made landfall in Nicaragua this morning as a powerful Category 5 storm, the U.S. National Hurricane Center said. The storm also is hammering a part of the Honduras coast that includes hotels and expensive vacation homes. developing story.",
							"Hurricane Felix made landfall with potentially disastrous rain and top wind speeds of 160 mph (260 kph) near the Nicaragua-Honduras border Tuesday morning, the U.S. National Hurricane Center said.");
			System.out.println(d);
			d = sim.resnikSimilarity("house is white.", "house is black.");
			System.out.println(d);
			d = sim.resnikSimilarity("house is white.",
					"construction  is black.");
			System.out.println(d);
			d = sim
					.similarity(
							"Felix lands as a Cat 5: Hurricane Felix made landfall in Nicaragua this morning as a powerful Category 5 storm, the U.S. National Hurricane Center said. The storm also is hammering a part of the Honduras coast that includes hotels and expensive vacation homes. developing story.",
							"Hurricane Felix made landfall with potentially disastrous rain and top wind speeds of 160 mph (260 kph) near the Nicaragua-Honduras border Tuesday morning, the U.S. National Hurricane Center said.");
			System.out.println(d);
			d = sim.similarity("house is white.", "house is black.");
			System.out.println(d);
			d = sim.similarity("house is white.",
					"construction  is black.");
			System.out.println(d);
			d = sim.resnikSimilarity("Troop Reduction Is Possible, Bush Says.",
					"Hurricane Felix Battering Central America.");
			System.out.println(d);
			d = sim.similarity(
					"Troop Reduction Is Possible, Bush Says.",
					"Hurricane Felix Battering Central America.");
			System.out.println(d);
			d = sim
					.resnikSimilarity(
							" Suicide bombers kill 25 in Pakistan army stronghold.",
							"RAWALPINDI (AFP)  Two suicide bombings Tuesday ripped through a military bus and a market near the Pakistani army's headquarters, killing 25 people in the latest attacks aimed at destabilising President Pervez Musharraf.");
			System.out.println(d);
			d = sim
					.similarity(
							" Suicide bombers kill 25 in Pakistan army stronghold.",
							"RAWALPINDI (AFP)  Two suicide bombings Tuesday ripped through a military bus and a market near the Pakistani army's headquarters, killing 25 people in the latest attacks aimed at destabilising President Pervez Musharraf.");
			System.out.println(d);
			d = sim
					.resnikSimilarity(
							" Suicide bombers kill 25 in Pakistan army stronghold.",
							"Army spokesman Major General Waheed Arshad said a total of 25 people were killed and 68 were wounded in the blasts..");
			System.out.println(d);
			d = sim
					.similarity(
							" Suicide bombers kill 25 in Pakistan army stronghold.",
							"Army spokesman Major General Waheed Arshad said a total of 25 people were killed and 68 were wounded in the blasts..");
			System.out.println(d);
			System.out.println("\n");
		} catch (Exception ex) {
			System.out.println(ex.toString());
			ex.printStackTrace();
		}
	}
}
