package sis.ra.evaluation;

import java.io.File;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;

import org.jdom.Document;
import org.jdom.Element;
import org.jdom.Namespace;
import org.jdom.input.SAXBuilder;

import sis.ra.partofspeech.wordindex;
import sis.ra.utility.InxEntity;
import sis.ra.utility.Utils;

public class extractVerbOfLaunch {

	/**
	 * Parses an Inxight IMS entity-extraction XML response and collects the
	 * entities whose entity-type is NOUN_GROUP, PROP_MISC or ADDRESS_INTERNET.
	 *
	 * @param xmlfile the raw XML response content (despite the name, this is the
	 *                XML text itself, not a file path)
	 * @return the matching entities; empty when parsing fails (the failure is
	 *         logged and swallowed on purpose — best-effort extraction)
	 */
	public static ArrayList<InxEntity> parseIMSXMLContentList(String xmlfile)
	{
		ArrayList<InxEntity> ens = new ArrayList<InxEntity>();
		try {
			Document document = new SAXBuilder().build(new StringReader(xmlfile));
			Element root = document.getRootElement();
			// "inxenv" is the envelope prefix declared on the root element;
			// the payload itself lives in the IMS 4.6 response namespace.
			Namespace inx = root.getNamespace("inxenv");
			Namespace inx2 = Namespace.getNamespace("http://www.inxight.com/ims/response/4.6");

			Element docprocnode = root.getChild("payload", inx).getChild("document-processing", inx2);
			Element entitylist = docprocnode.getChild("extract-entities", inx2).getChild("entities", inx2);
			// JDOM 1.x getChildren() returns a raw List; elements are Elements.
			List children = entitylist.getChildren("entity", inx2);
			for (Object entity : children) {
				Element el = (Element) entity;
				String canonical = el.getAttributeValue("canonical");
				String canonicaltype = el.getAttributeValue("canonical-type");
				String surface = el.getAttributeValue("surface");
				String entitytype = el.getAttributeValue("entity-type");
				String offset = el.getAttributeValue("offset");
				String confidence = el.getAttributeValue("confidence");
				String relevance = el.getAttributeValue("relevance");
				// Constant-first equals() avoids an NPE when the entity-type
				// attribute is missing (getAttributeValue returns null then).
				if ("NOUN_GROUP".equals(entitytype) || "PROP_MISC".equals(entitytype)
						|| "ADDRESS_INTERNET".equals(entitytype)) {
					// InxEntity(canonical, canonicalType, confidence, entityType,
					//           id, offset, position, relevance, surface)
					InxEntity en = new InxEntity(canonical, canonicaltype, confidence,
							entitytype, "", offset, "", relevance, surface);
					ens.add(en);
				}
			}
		} catch (Exception e) {
			// Best-effort: dump the offending content for debugging and return
			// whatever entities were collected before the failure.
			System.out.println(xmlfile);
			e.printStackTrace();
		}
		return ens;
	}

	/**
	 * Checks whether {@code sentence} matches the COMPANY-verb pattern: for an
	 * active-voice sentence, prints the entities appearing after the verb; for a
	 * passive-voice sentence (contains a past-participle "V-PaPart" POS tag),
	 * prints the entities appearing before it.
	 *
	 * TODO(review): {@code is} is never set to true, so this method currently
	 * always returns false — it only prints candidate entities. Confirm whether
	 * the pattern decision was left unimplemented intentionally.
	 *
	 * @param sentence the sentence to analyse
	 * @param COMPANY  the company name (currently unused by the body)
	 * @param verb     the trigger verb to locate in the sentence
	 * @return always false in the current implementation (see TODO above)
	 */
	public static boolean isPattern(String sentence, String COMPANY, String verb)
	{
		boolean is = false;
		boolean isPassive = false;
		HashMap<Integer, wordindex> pos = seperateWikipediaArticleSentence.pos(sentence, "text");
		String possent = seperateWikipediaArticleSentence.toString(pos);
		// Past-participle tag signals passive voice (e.g. "was launched by ...").
		if (possent.contains("V-PaPart")) {
			isPassive = true;
		}
		System.out.println(possent);
		ArrayList<InxEntity> entities = parseIMSXMLContentList(Utils.getNE(sentence));
		// NOTE(review): indexOf may return -1 when the verb occurred in the
		// line prefix rather than the sentence itself — confirm against caller.
		int verbindex = sentence.indexOf(verb);
		System.out.println(verbindex);
		for (int t = 0; t < entities.size(); t++) {
			// Offsets come from the IMS XML attributes; assumed to be numeric.
			int offset = Integer.parseInt(entities.get(t).getOffset());
			// Active voice: subject precedes the verb, so objects follow it.
			if (offset > verbindex && !isPassive) {
				System.out.println(entities.get(t).getEntityType() + " " + entities.get(t).getCanoical());
			}
			// Passive voice: the acted-upon entity precedes the verb.
			if (offset < verbindex && isPassive) {
				System.out.println(entities.get(t).getEntityType() + " " + entities.get(t).getCanoical());
			}
		}
		return is;
	}

	/**
	 * Scans every ".segsent" file in the dataset directory, and for each line
	 * containing one of the target verbs, runs {@link #isPattern} on the
	 * sentence and prints a per-verb match count.
	 */
	public static void main(String[] args) {
		String filepathstr = "C:\\Documents and Settings\\I820753\\Desktop\\dataset\\testing company\\";

		File filepath = new File(filepathstr);
		File[] files = filepath.listFiles();
		// listFiles() returns null when the directory is missing or unreadable.
		if (files == null) {
			System.out.println("Cannot list directory: " + filepathstr);
			return;
		}
		// Derive the prefix length from the directory path instead of the
		// former hard-coded 66, so the scan survives a moved dataset folder.
		int prefixLen = filepathstr.length();

		// Additional candidate verbs kept for reference:
		// announce;acquire;release;buy;say;use;add;offer;own;get;provide;go;
		// download;introduce;unveil;develop;make;plan;join;create;call;include;
		// become;test;open;find;do;take
		String verbs = "launch";
		String[] verbss = verbs.split(";");
		for (int m = 0; m < verbss.length; m++) {
			int count = 0;
			String verb = verbss[m];
			for (int i = 0; i < files.length; i++) {
				if (!files[i].toString().contains(".segsent")) continue;

				String content = Utils.readFile(files[i].toString());
				String[] contents = content.split("\n");
				for (int j = 0; j < contents.length; j++) {
					if (contents[j].contains(verb)) {
						// Lines are expected as "<sentence-no>\t<sentence>";
						// skip malformed lines instead of crashing on them.
						String[] parts = contents[j].split("\t");
						if (parts.length < 2) continue;
						// First five characters of the file name identify the company.
						System.out.println(files[i].toString().substring(prefixLen, prefixLen + 5) + contents[j]);
						String sentno = files[i].toString().substring(prefixLen, prefixLen + 5) + parts[0];
						String sent = parts[1];
						System.out.println(sent);

						String company = files[i].toString().substring(prefixLen).replace(".segsent", "");
						if (isPattern(sent, company, verb)) {
							System.out.println("pattern");
						}
						count++;
					}
				}
			}
			System.out.println(verb + "\t" + count);
		}
	}
}
