package extraction;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import opennlp.tools.sentdetect.SentenceDetector;
import opennlp.tools.sentdetect.SentenceDetectorME;
import opennlp.tools.sentdetect.SentenceModel;

import org.mapdb.DB;
import org.mapdb.DBMaker;
import org.mapdb.Utils;

import com.google.common.collect.ArrayListMultimap;

public class ExtractSentences {
	
	static Set<String> goldenStandard = new HashSet<String>();
	static Map<String,String> wiki_articles;
	static Map<String,ArrayListMultimap<String,String>> relations;
	static Map<String,String> entities;
	
	static SentenceModel sModel = null;	
	static SentenceDetector sent = null;
	
	static String RESOURCES = "/home/dsbatista/resources/";
	
	public static void distantSupervison(String[] args) throws IOException {

		/*
		 * Driver for the distant-supervision pipeline: loads the Portuguese
		 * OpenNLP sentence-detection model, selects PER/LOC/ORG entities from
		 * args[1] and their relations from args[2]. The later stages
		 * (Wikipedia indexing, golden standard, sentence extraction) are
		 * currently disabled below.
		 */
		sModel = new SentenceModel(new FileInputStream(RESOURCES + "pt.sentDetectVerbPP.model"));
		sent = new SentenceDetectorME(sModel);

		/* select all entities whose type is PER,LOC or ORG */
		System.out.println("\nSelecting entities (PER,ORG,LOC) from " + args[1]);
		getEntities(args[1]);

		/* collect all relations between PER,LOC or ORG entities */
		System.out.println("\nReading relations from " + args[2]);
		// The original if/else-if pair tested the same condition twice; the
		// boolean simply flags whether args[2] is the inference-derived
		// "relations.n3" file (in which case no triple file is re-emitted).
		collectRelations(args[2], args[2].equalsIgnoreCase("relations.n3"));
		System.out.println("\n" + relations.size() + " relations types");
		System.out.println();

		/*
		/* index wikipedia articles in MapDB 			
		System.out.println("Indexing Wikipedia Articles from " + args[3]);
		readWikiArticles(args[3]);
		System.out.println("\n" + wiki_articles.size() + " articles");
		*/

		/* read golden sentence sentences
		readGoldenStandard(args[4]);		
		System.out.println("\n" + goldenStandard.size() + " sentences in golden standard");
		
		System.out.println("Extracting sentences ..");
		findRelations(args[5]);
		*/
	}
	
	public static void readGoldenStandard( String file) throws IOException {
		/*
		 * Loads the manually checked golden-standard file: for each block
		 * starting with "SENTENCE : ..." that is marked
		 * "MANUALLY CHECKED : TRUE", collects the sentence, both entities and
		 * the relation type, and stores "sentence|entity1|entity2|type" in
		 * the static goldenStandard set. IGNORE/FALSE records are skipped.
		 *
		 * Fixes over the original: null checks on every readLine() result
		 * (a truncated file used to throw NPE), per-record scoping of
		 * e1/e2/type (stale values no longer leak into the next record), and
		 * the reader is closed in a finally block.
		 */
		BufferedReader input = new BufferedReader(new FileReader(file));
		try {
			String aux;
			while ((aux = input.readLine()) != null) {
				if (!aux.startsWith("SENTENCE")) continue;
				String sentence = aux.split(": ")[1];
				aux = input.readLine();
				// tolerate one blank line between SENTENCE and the verdict
				if (aux != null && aux.equals("")) aux = input.readLine();
				if (aux == null) break; // truncated record at EOF
				if (aux.startsWith("MANUALLY CHECKED : IGNORE")) continue;
				if (aux.startsWith("MANUALLY CHECKED : FALSE")) continue;
				if (aux.startsWith("MANUALLY CHECKED : TRUE")) {
					String e1 = null;
					String e2 = null;
					String type = null;
					// read the record body up to the '*' separator line
					while (aux != null && !aux.startsWith("*")) {
						if (aux.startsWith("ENTITY1")) e1 = aux.split(": ")[1].trim();
						if (aux.startsWith("ENTITY2")) e2 = aux.split(": ")[1].trim();
						if (aux.startsWith("REL TYPE")) type = aux.split(": ")[1];
						aux = input.readLine();
					}
					StringBuilder i = new StringBuilder();
					i.append(sentence);
					i.append("|");
					i.append(e1);
					i.append("|");
					i.append(e2);
					i.append("|");
					i.append(type);
					goldenStandard.add(i.toString());
				}
			}
		} finally {
			input.close();
		}
	}
	
	public static Map<String, String> getEntities(String file) throws IOException{
		/*
		 * Reads an N-Triples instance-types file and keeps only resources
		 * typed as DBpedia Person, Place or Organisation. The result maps the
		 * resource name (URI suffix, '>' stripped) to "PERSON" / "PLACE" /
		 * "ORGANISATION", stored in a temporary-file-backed MapDB TreeMap
		 * (also assigned to the static 'entities' field).
		 *
		 * Fix over the original: only the PLACE branch was guarded against
		 * malformed lines; now all three branches share the same guard so a
		 * bad PERSON/ORGANISATION line no longer aborts the whole import.
		 */
		String person = "<http://dbpedia.org/ontology/Person> .";
		String place = "<http://dbpedia.org/ontology/Place> .";
		String org = "<http://dbpedia.org/ontology/Organisation> .";
		//String base = "<http://pt.dbpedia.org/resource/";
		String base = "<http://dbpedia.org/resource/";
		BufferedReader f = new BufferedReader(new FileReader(file));
		String aux = null;
		File dbFile = Utils.tempDbFile();
		DB db = DBMaker
				.newFileDB(dbFile)
				/* disabling Write Ahead Log makes import much faster */
				.writeAheadLogDisable()
				.make();
		entities = db.getTreeMap("entities");
		int num = 0;
		while ((aux = f.readLine()) != null) {
			if (aux.startsWith("#")) continue;
			if ( num % 10000 == 0 ) System.out.print(".");   // progress dots
			try {
				if (aux.endsWith(person)) {
					entities.put(subjectName(aux, base), "PERSON");
				}
				else if (aux.endsWith(place)) {
					entities.put(subjectName(aux, base), "PLACE");
				}
				else if (aux.endsWith(org)) {
					entities.put(subjectName(aux, base), "ORGANISATION");
				}
			} catch (Exception e) {
				// malformed line: report it and keep importing
				System.out.println("aux: " + aux);
				e.printStackTrace();
			}
			num++;
		}
		System.out.println();
		f.close();
		return entities;
	}

	/**
	 * Extracts the resource name from the subject of an N-Triples line:
	 * takes the first whitespace-separated token, strips the base-URI prefix
	 * and the closing '>'.
	 */
	private static String subjectName(String line, String base) {
		return line.split("\\s")[0].split(base)[1].replaceFirst(">", "");
	}
	
	private static void findRelations(String outfile) throws IOException {
		/*
		 * For every known relation instance (entity1, relation, entity2),
		 * scans the Wikipedia articles of both entities and writes every
		 * sentence mentioning both normalized entity names to outfile, in the
		 * same record format readGoldenStandard() parses. Sentences already in
		 * the golden standard are skipped.
		 *
		 * Fixes over the original: the sentence filter now tests the
		 * normalized form of BOTH entities (the original tested the raw,
		 * underscore-separated entity2 title, which rarely occurs in prose),
		 * the duplicated article1/article2 extraction loop is a single
		 * helper, and the relation-name stripping is done once per relation
		 * instead of inside the innermost loop.
		 */
		BufferedWriter out = new BufferedWriter(new FileWriter(outfile));
		for (String relation : relations.keySet()) {
			System.out.println(relation + '\t' + relations.get(relation).size());
			ArrayListMultimap<String,String> r = relations.get(relation);
			// strip the ontology prefix (with or without angle brackets);
			// this is the "REL TYPE" written to the output records
			String relType = relation
					.replaceFirst("<http://dbpedia.org/ontology/", "").replaceFirst(">", "")
					.replaceFirst("http://dbpedia.org/ontology/", "");
			for (String entity1 : r.keySet()) {
				for (String entity2 : r.get(entity1)) {
					// "Foo_Bar_(film)" -> "Foo Bar " : how entities appear in article text
					String entity1_norm = entity1.replaceAll("_", " ").replaceAll("\\(.*\\)", "");
					String entity2_norm = entity2.replaceAll("_", " ").replaceAll("\\(.*\\)", "");

					if (entity1_norm.equals(entity2_norm)) continue;

					extractFromArticle(wiki_articles.get(entity1), entity1, entity2,
							entity1_norm, entity2_norm, relType, out);
					extractFromArticle(wiki_articles.get(entity2), entity1, entity2,
							entity1_norm, entity2_norm, relType, out);
				}
			}
		}
		out.close();
	}

	/**
	 * Splits one article into sentences and writes every sentence containing
	 * both normalized entities, unless it looks like a trailing bullet-list
	 * section (three or more '*' markers, e.g. discographies) or is already
	 * present in the golden standard. No-op when article is null.
	 */
	private static void extractFromArticle(String article, String entity1, String entity2,
			String entity1_norm, String entity2_norm, String relType, BufferedWriter out)
			throws IOException {
		if (article == null) return;
		for (String s : sent.sentDetect(article)) {
			if (s.contains(entity1_norm) && s.contains(entity2_norm)
					&& !(s.matches(".*\\*.*.*\\*.*.*\\*.*"))) {
				// same key format the golden standard is stored in
				String key = s + "|" + entity1_norm + "|" + entity2_norm + "|" + relType;
				if (!goldenStandard.contains(key)) {
					out.write("SENTENCE : " + s + "\n");
					out.write("\n");
					out.write("MANUALLY CHECKED : FALSE" + "\n");
					out.write("\n");
					out.write("ENTITY1 : " + entity1_norm + "\n");
					out.write("TYPE1 : " + entities.get(entity1) + "\n");
					out.write("ENTITY2 : " + entity2_norm + "\n");
					out.write("TYPE2 : " + entities.get(entity2) + "\n");
					out.write("REL TYPE : " + relType + "\n");
					out.write("************************\n");
				}
			}
		}
	}
		
	private static void readWikiArticles(String file) throws IOException{
		BufferedReader br = new BufferedReader(new FileReader(file));				
		String article = null;
		String title = null;
		String line;		
		File dbFile = Utils.tempDbFile();
        DB db = DBMaker
                .newFileDB(dbFile)
                /** disabling Write Ahead Log makes import much faster */
                .writeAheadLogDisable()
                .make();
		        
        wiki_articles = db.getTreeMap("wiki");
        	    
		int num=0;
		
		while ((line = br.readLine()) != null) {
			if ( num % 10000 == 0 ) System.out.print(".");
			try {
				title = line.split("\\t")[0];
				article = line.split("\\t")[1];
				wiki_articles.put(title, article);
			} catch (Exception e) {
				
			}
			num++;			
			
			
		}
		br.close();	
	}

	private static void collectRelations(String file, boolean overInference) throws FileNotFoundException, IOException {
		/*
		 * Parses an N-Triples relations file and fills the static 'relations'
		 * map: relation URI -> multimap of entity1 -> entity2. Only triples
		 * whose predicate is not in the ignore list and whose subject and
		 * object are both known PER/LOC/ORG entities are kept. When
		 * overInference is false, the kept triples (plus one rdf:type triple
		 * per entity) are also written to
		 * "mappingbased_properties_pt_per_loc_org.ttl" so a triple store can
		 * run inference restricted to PER/LOC/ORG.
		 *
		 * Fixes over the original: the ignore list is a HashSet (O(1) lookup,
		 * the original did List.contains on ~220 entries per line); the
		 * rdf:type ignore entry and the rdf:type IRI written to the .ttl file
		 * now carry the angle brackets required by the Turtle/N-Triples
		 * syntax; each line is split only once; and a malformed subject no
		 * longer crashes the run (only the object parse was guarded before).
		 */
		relations = new HashMap<String, ArrayListMultimap<String,String>>();
		int num=0;
		Reader in = new InputStreamReader(new FileInputStream(file), "UTF8");
		BufferedReader br = new BufferedReader(in);
		// predicates carrying literals, ids, dates, measurements etc. that
		// are useless for sentence extraction (duplicate entries are harmless
		// now that membership is a set)
		String[] ignore = {
		"<http://xmlns.com/foaf/0.1/name>",
		"<http://www.georss.org/georss/point>",
		// fixed: was missing the angle brackets, so it never matched
		"<http://www.w3.org/1999/02/22-rdf-syntax-ns#type>",
		"<http://xmlns.com/foaf/0.1/homepage>",
		"<http://xmlns.com/foaf/0.1/logo>",
		"<http://dbpedia.org/ontology/date>",
		"<http://dbpedia.org/ontology/position>",
		"<http://www.w3.org/2003/01/geo/wgs84_pos#lat>",
		"<http://purl.org/dc/elements/1.1/description>",
		"<http://www.w3.org/2003/01/geo/wgs84_pos#long>",
		"<http://www.w3.org/2004/02/skos/core#subject>",
		"<http://dbpedia.org/ontology/piercing>",
		"<http://dbpedia.org/ontology/number>",
		"<http://dbpedia.org/ontology/numberOfCounties>",
		"<http://dbpedia.org/ontology/numberOfEmployees>",
		"<http://dbpedia.org/ontology/numberOfRooms>",
		"<http://dbpedia.org/ontology/numberOfStaff>",
		"<http://dbpedia.org/ontology/numberOfParkingSpaces>",
		"<http://dbpedia.org/ontology/numberOfUndergraduateStudents>",
		"<http://dbpedia.org/ontology/numberOfPostgraduateStudents>",
		"<http://dbpedia.org/ontology/numberOfStudents>",
		"<http://dbpedia.org/ontology/numberOfMunicipalities>",
		"<http://dbpedia.org/ontology/numberOfVisitors>",
		"<http://dbpedia.org/ontology/numberOfMembers>",
		"<http://dbpedia.org/ontology/numberOfFilms>",
		"<http://dbpedia.org/ontology/numberOfFederalDeputies>",
		"<http://dbpedia.org/ontology/numberOfStateDeputies>",
		"<http://dbpedia.org/ontology/numberOfDoctoralStudents>",
		"<http://dbpedia.org/ontology/numberOfVolunteers>",
		"<http://dbpedia.org/ontology/numberOfTerritories>",
		"<http://dbpedia.org/ontology/numberOfMinistries>",
		"<http://dbpedia.org/ontology/numberOfCountries>",
		"<http://dbpedia.org/ontology/numberOfCapitalDeputies>",
		"<http://dbpedia.org/ontology/membership>",
		"<http://dbpedia.org/ontology/censusYear>",
		"<http://dbpedia.org/ontology/openingYear>",
		"<http://dbpedia.org/ontology/depth>",
		"<http://dbpedia.org/ontology/populationTotal>",
		"<http://dbpedia.org/ontology/appearancesInNationalTeam>",
		"<http://dbpedia.org/ontology/bedCount>",
		"<http://dbpedia.org/ontology/capacity>",
		"<http://dbpedia.org/ontology/facultySize>",
		"<http://dbpedia.org/ontology/floorCount>",
		"<http://dbpedia.org/ontology/number>",
		"<http://dbpedia.org/ontology/numberOfCapitalDeputies>",
		"<http://dbpedia.org/ontology/numberOfCounties>",
		"<http://dbpedia.org/ontology/numberOfCountries>",
		"<http://dbpedia.org/ontology/numberOfDoctoralStudents>",
		"<http://dbpedia.org/ontology/numberOfEmployees>",
		"<http://dbpedia.org/ontology/numberOfFederalDeputies>",
		"<http://dbpedia.org/ontology/numberOfFilms>",
		"<http://dbpedia.org/ontology/numberOfMembers>",
		"<http://dbpedia.org/ontology/numberOfMinistries>",
		"<http://dbpedia.org/ontology/numberOfMunicipalities>",
		"<http://dbpedia.org/ontology/numberOfParkingSpaces>",
		"<http://dbpedia.org/ontology/numberOfPostgraduateStudents>",
		"<http://dbpedia.org/ontology/numberOfRooms>",
		"<http://dbpedia.org/ontology/numberOfStaff>",
		"<http://dbpedia.org/ontology/numberOfStateDeputies>",
		"<http://dbpedia.org/ontology/numberOfStudents>",
		"<http://dbpedia.org/ontology/numberOfTerritories>",
		"<http://dbpedia.org/ontology/numberOfUndergraduateStudents>",
		"<http://dbpedia.org/ontology/numberOfVisitors>",
		"<http://dbpedia.org/ontology/numberOfVolunteers>",
		"<http://dbpedia.org/ontology/partyNumber>",
		"<http://dbpedia.org/ontology/passengersPerYear>",
		"<http://dbpedia.org/ontology/populationMetro>",
		"<http://dbpedia.org/ontology/populationRural>",
		"<http://dbpedia.org/ontology/populationTotal>",
		"<http://dbpedia.org/ontology/populationTotalRanking>",
		"<http://dbpedia.org/ontology/populationUrban>",
		"<http://dbpedia.org/ontology/seatingCapacity>",
		"<http://dbpedia.org/ontology/sessionNumber>",
		"<http://dbpedia.org/ontology/shoeNumber>",
		"<http://dbpedia.org/ontology/staff>",
		"<http://dbpedia.org/ontology/vehiclesPerDay>",
		"<http://dbpedia.org/ontology/allegiance>",
		"<http://dbpedia.org/ontology/birthDate>",
		"<http://dbpedia.org/ontology/closingDate>",
		"<http://dbpedia.org/ontology/date>",
		"<http://dbpedia.org/ontology/deathDate>",
		"<http://dbpedia.org/ontology/firstAirDate>",
		"<http://dbpedia.org/ontology/formationDate>",
		"<http://dbpedia.org/ontology/foundingDate>",
		"<http://dbpedia.org/ontology/functionEndDate>",
		"<http://dbpedia.org/ontology/functionStartDate>",
		"<http://dbpedia.org/ontology/openingDate>",
		"<http://dbpedia.org/ontology/populationAsOf>",
		"<http://dbpedia.org/ontology/titleDate>",
		"<http://dbpedia.org/ontology/iafdId>",
		"<http://dbpedia.org/ontology/imdbId>",
		"<http://dbpedia.org/ontology/purpose>",
		"<http://dbpedia.org/ontology/leaderTitle>",
		"<http://dbpedia.org/ontology/officialSchoolColour>",
		"<http://dbpedia.org/ontology/title>",
		"<http://dbpedia.org/ontology/abbreviation>",
		"<http://dbpedia.org/ontology/afdbId>",
		"<http://dbpedia.org/ontology/agencyStationCode>",
		"<http://dbpedia.org/ontology/alias>",
		"<http://dbpedia.org/ontology/allegiance>",
		"<http://dbpedia.org/ontology/areaCode>",
		"<http://dbpedia.org/ontology/background>",
		"<http://dbpedia.org/ontology/bgafdId>",
		"<http://dbpedia.org/ontology/birthName>",
		"<http://dbpedia.org/ontology/callSign>",
		"<http://dbpedia.org/ontology/colourHexCode>",
		"<http://dbpedia.org/ontology/colourName>",
		"<http://dbpedia.org/ontology/committee>",
		"<http://dbpedia.org/ontology/criteria>",
		"<http://dbpedia.org/ontology/demonym>",
		"<http://dbpedia.org/ontology/draftPick>",
		"<http://dbpedia.org/ontology/draftRound>",
		"<http://dbpedia.org/ontology/egafdId>",
		"<http://dbpedia.org/ontology/eurobabeIndexId>",
		"<http://dbpedia.org/ontology/faaLocationIdentifier>",
		"<http://dbpedia.org/ontology/fansgroup>",
		"<http://dbpedia.org/ontology/fate>",
		"<http://dbpedia.org/ontology/formerName>",
		"<http://dbpedia.org/ontology/hasNaturalBust>",
		"<http://dbpedia.org/ontology/iafdId>",
		"<http://dbpedia.org/ontology/iataLocationIdentifier>",
		"<http://dbpedia.org/ontology/icaoLocationIdentifier>",
		"<http://dbpedia.org/ontology/id>",
		"<http://dbpedia.org/ontology/identificationSymbol>",
		"<http://dbpedia.org/ontology/imdbId>",
		"<http://dbpedia.org/ontology/leaderTitle>",
		"<http://dbpedia.org/ontology/lifeExpectancy>",
		"<http://dbpedia.org/ontology/mascot>",
		"<http://dbpedia.org/ontology/measurements>",
		"<http://dbpedia.org/ontology/membership>",
		"<http://dbpedia.org/ontology/militaryCommand>",
		"<http://dbpedia.org/ontology/motto>",
		"<http://dbpedia.org/ontology/notes>",
		"<http://dbpedia.org/ontology/nutsCode>",
		"<http://dbpedia.org/ontology/office>",
		"<http://dbpedia.org/ontology/officialSchoolColour>",
		"<http://dbpedia.org/ontology/orderInOffice>",
		"<http://dbpedia.org/ontology/piercing>",
		"<http://dbpedia.org/ontology/position>",
		"<http://dbpedia.org/ontology/postalCode>",
		"<http://dbpedia.org/ontology/pseudonym>",
		"<http://dbpedia.org/ontology/purpose>",
		"<http://dbpedia.org/ontology/slogan>",
		"<http://dbpedia.org/ontology/status>",
		"<http://dbpedia.org/ontology/subtitle>",
		"<http://dbpedia.org/ontology/supplementalDraftRound>",
		"<http://dbpedia.org/ontology/synonym>",
		"<http://dbpedia.org/ontology/tatoo>",
		"<http://dbpedia.org/ontology/title>",
		"<http://dbpedia.org/ontology/topLevelDomain>",
		"<http://dbpedia.org/ontology/utcOffset>",
		"<http://dbpedia.org/ontology/vehicleCode>",
		"<http://purl.org/dc/elements/1.1/description>",
		"<http://www.georss.org/georss/point>",
		"<http://xmlns.com/foaf/0.1/familyName>",
		"<http://xmlns.com/foaf/0.1/givenName>",
		"<http://xmlns.com/foaf/0.1/name>",
		"<http://xmlns.com/foaf/0.1/nick>",
		"<http://xmlns.com/foaf/0.1/depiction>",
		"<http://dbpedia.org/ontology/giniCoefficient>",
		"<http://dbpedia.org/ontology/humanDevelopmentIndex>",
		"<http://dbpedia.org/ontology/illiteracy>",
		"<http://dbpedia.org/ontology/infantMortality>",
		"<http://dbpedia.org/ontology/percentageOfAreaWater>",
		"<http://dbpedia.org/ontology/shareOfAudience>",
		"<http://www.w3.org/2003/01/geo/wgs84_pos#lat>",
		"<http://www.w3.org/2003/01/geo/wgs84_pos#long>",
		"<http://dbpedia.org/ontology/area>",
		"<http://dbpedia.org/ontology/areaLand>",
		"<http://dbpedia.org/ontology/areaMetro>",
		"<http://dbpedia.org/ontology/areaTotal>",
		"<http://dbpedia.org/ontology/areaUrban>",
		"<http://dbpedia.org/ontology/areaWater>",
		"<http://dbpedia.org/ontology/depth>",
		"<http://dbpedia.org/ontology/distanceToCapital>",
		"<http://dbpedia.org/ontology/elevation>",
		"<http://dbpedia.org/ontology/floorArea>",
		"<http://dbpedia.org/ontology/frequency>",
		"<http://dbpedia.org/ontology/height>",
		"<http://dbpedia.org/ontology/length>",
		"<http://dbpedia.org/ontology/maximumElevation>",
		"<http://dbpedia.org/ontology/minimumElevation>",
		"<http://dbpedia.org/ontology/populationDensity>",
		"<http://dbpedia.org/ontology/populationMetroDensity>",
		"<http://dbpedia.org/ontology/temperature>",
		"<http://dbpedia.org/ontology/timeInSpace>",
		"<http://dbpedia.org/ontology/volume>",
		"<http://dbpedia.org/ontology/weight>",
		"<http://dbpedia.org/ontology/width>",
		"<http://dbpedia.org/ontology/activeYearsEndYear>",
		"<http://dbpedia.org/ontology/activeYearsStartYear>",
		"<http://dbpedia.org/ontology/censusYear>",
		"<http://dbpedia.org/ontology/closingYear>",
		"<http://dbpedia.org/ontology/draftYear>",
		"<http://dbpedia.org/ontology/eruptionYear>",
		"<http://dbpedia.org/ontology/extinctionYear>",
		"<http://dbpedia.org/ontology/firstAscentYear>",
		"<http://dbpedia.org/ontology/foundingYear>",
		"<http://dbpedia.org/ontology/openingYear>",
		"<http://dbpedia.org/ontology/supplementalDraftYear>",
		"<http://dbpedia.org/ontology/undraftedYear>",
		"<http://dbpedia.org/ontology/visitorStatisticsAsOf>",
		"<http://dbpedia.org/ontology/year>",
		"<http://dbpedia.org/ontology/careerPrizeMoney>",
		"<http://dbpedia.org/ontology/cost>",
		"<http://dbpedia.org/ontology/endowment>",
		"<http://dbpedia.org/ontology/grossDomesticProduct>",
		"<http://dbpedia.org/ontology/netIncome>",
		"<http://dbpedia.org/ontology/networth>",
		"<http://dbpedia.org/ontology/perCapitaIncome>",
		"<http://dbpedia.org/ontology/salary>",
		"<http://dbpedia.org/ontology/toll>",
		"<http://dbpedia.org/ontology/inseeCode>",
		"<http://dbpedia.org/ontology/englishName>",
		"<http://dbpedia.org/ontology/alemmanicName>",
		"<http://dbpedia.org/ontology/fipsCode>",
		"<http://dbpedia.org/ontology/cityType>",
		"<http://dbpedia.org/ontology/tattoo>",
		"<http://dbpedia.org/ontology/buildingStartDate>",
		"<http://dbpedia.org/ontology/formationYear>"};

		Set<String> ignoreSet = new HashSet<String>(Arrays.asList(ignore));
		BufferedWriter out = null;
		if (!overInference) {
			out = new BufferedWriter(new FileWriter("mappingbased_properties_pt_per_loc_org.ttl"));
		}

		// entities whose rdf:type triple has already been written to the .ttl
		HashSet<String> alreadyDefined = new HashSet<String>();
		// NOTE(review): this string is used as a split() regex — "<?" makes
		// the leading angle bracket optional, presumably to cope with
		// subjects written with or without it. Kept as in the original.
		String base = "<?http://dbpedia.org/resource/";
		//String base = "<?http://pt.dbpedia.org/resource/"

		String line = null;
		while ( (line = br.readLine()) != null) {
			if (line.startsWith("#")) continue;
			if ( num % 10000 == 0 ) System.out.print(".");   // progress dots
			String[] tokens = line.split("\\s");   // split once per line
			String relation = tokens[1];
			String entity1;
			String entity2;
			try {
				entity1 = tokens[0].split(base)[1].replaceFirst(">", "");
				entity2 = tokens[2].split(base)[1].replaceFirst(">", "");
			} catch (Exception e) {
				// malformed triple (e.g. a literal object or a subject not in
				// the resource namespace) — skip it and keep going
				continue;
			}

			if (!ignoreSet.contains(relation) && entities.get(entity1) != null && entities.get(entity2) != null) {
				if (!overInference) {
					// Write the triple (plus one rdf:type triple per entity)
					// to be indexed in a triple store, so inference runs over
					// PER/LOC/ORG only.
					if (!alreadyDefined.contains(entity1)) {
						out.write(tokens[0] + ' ' + "<http://www.w3.org/1999/02/22-rdf-syntax-ns#type>" + ' ' + typeUrl(entities.get(entity1)) + " . \n");
						alreadyDefined.add(entity1);
					}
					if (!alreadyDefined.contains(entity2)) {
						out.write(tokens[2] + ' ' + "<http://www.w3.org/1999/02/22-rdf-syntax-ns#type>" + ' ' + typeUrl(entities.get(entity2)) + " . \n");
						alreadyDefined.add(entity2);
					}
					out.write(tokens[0] + ' ' + tokens[1] + ' ' + tokens[2] + " . \n");
				}
				ArrayListMultimap<String,String> tmp = relations.get(relation);
				if (tmp == null) {
					tmp = ArrayListMultimap.create();
					relations.put(relation, tmp);
				}
				tmp.put(entity1, entity2);
			}
			num++;
		}
		br.close();
		if (!overInference) out.close();
	}

	/**
	 * Maps an entity type label ("PERSON"/"PLACE"/"ORGANISATION", as stored
	 * in the entities map) to its DBpedia ontology class URI; null for
	 * anything else.
	 */
	private static String typeUrl(String etype) {
		if ("PERSON".equalsIgnoreCase(etype)) return "<http://dbpedia.org/ontology/Person>";
		if ("PLACE".equalsIgnoreCase(etype)) return "<http://dbpedia.org/ontology/Place>";
		if ("ORGANISATION".equalsIgnoreCase(etype)) return "<http://dbpedia.org/ontology/Organisation>";
		return null;
	}

	public static void buildRelations(String file) throws IOException {
		/*
		 * Like collectRelations() but for the pt.dbpedia.org namespace and
		 * without any predicate/entity filtering: every triple becomes an
		 * entry in the static 'relations' map
		 * (relation URI -> multimap of entity1 -> entity2).
		 *
		 * Fix over the original: the trailing '>' is now stripped from both
		 * entity names, matching the keys collectRelations() produces. The
		 * original's split(" .") used an unescaped regex dot and was a no-op
		 * on a whitespace-free token, so both names kept a trailing '>' and
		 * could never match wiki_articles/entities lookups downstream.
		 */
		relations = new HashMap<String, ArrayListMultimap<String,String>>();
		int num = 0;

		Reader in = new InputStreamReader(new FileInputStream(file), "UTF8");
		BufferedReader br = new BufferedReader(in);

		String line;
		while ( (line = br.readLine()) != null) {
			if (line.startsWith("#")) continue;
			if ( num % 10000 == 0 ) System.out.print(".");   // progress dots
			String[] tokens = line.split("\\s");
			String relation = tokens[1];
			String entity1 = tokens[0].split("http://pt.dbpedia.org/resource/")[1].replaceFirst(">", "");
			String entity2 = tokens[2].split("http://pt.dbpedia.org/resource/")[1].replaceFirst(">", "");
			ArrayListMultimap<String,String> tmp = relations.get(relation);
			if (tmp == null) {
				tmp = ArrayListMultimap.create();
				relations.put(relation, tmp);
			}
			tmp.put(entity1, entity2);
			num++;
		}
		br.close();
	}
}




