package sanitycheck;

import java.io.File;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.Map.Entry;

import setting.BioNLPSettings;
import util.io.FileUtil;
import abs.syn.Dependency;
import abs.syn.Parse;
import def.Argument;
import def.BioNLPDocument;
import def.Event;
import def.TSpan;
import exp.Count;

public class SimpleCheck {
	private static void test(){
		// One-off sanity check: verifies that trigger feature vectors shared between
		// the pooled training file and the dev file carry consistent labels, then
		// splits the dev instances into "seen in training pool" / "unseen" outputs.
		// NOTE(review): hard-coded y:/ paths — throwaway debugging code.
		String[] lines = FileUtil.getTextFromFile("y:/data02/xiaoling/bionlp/train_trigger_pool_view1.svm").split("\n");
		// Maps the feature part of each line (everything from the first space on)
		// to the full labeled line.
		Hashtable<String, String> table = new Hashtable<String, String>();
		for (String line: lines){
			// data = feature vector only; the SVM-light label precedes the first space.
			// NOTE(review): throws StringIndexOutOfBoundsException on a line with no
			// space (e.g. a trailing empty line) — assumes well-formed input.
			String data = line.substring(line.indexOf(" "));
			if (table.containsKey(data)){
				// Same feature vector seen with a different full line => label conflict.
				if (!table.get(data).equals(line))
					System.err.println("inconsistent data");
			}
			else{
				if (line.startsWith("1"))
					// Normalize a bare "1" label to "+1" — presumably so it matches the
					// dev file's "+1" label format; TODO confirm against the data files.
					table.put(data, "+"+line);
				else{
					table.put(data, line);
				}
			}
		}
		
		lines = FileUtil.getTextFromFile("y:/data02/xiaoling/bionlp/dev_trigger_view1.svm").split("\n");
		StringBuilder sb = new StringBuilder(), sb2 = new StringBuilder();
		// Counts dev lines that are positive ("+1") but disagree with the pooled line.
		int wrongLabels = 0;
		for (String line: lines){
			String data = line.substring(line.indexOf(" "));
			if (table.containsKey(data)){
				// Feature vector also present in the training pool: emit pooled version.
				sb.append(table.get(data)+"\n");
				if (!table.get(data).equals(line)){
					if (line.startsWith("+1"))
						wrongLabels ++;
				}
//				sb.append(line+"\n");
			}
			else{
				// Dev instance never seen in the training pool.
				sb2.append(line+"\n");
			}
		}
		System.out.println(wrongLabels);
		FileUtil.writeTextToFile(sb.toString(), "y:/data02/xiaoling/bionlp/train_trigger_pool_without_training_data_view1.svm");
		FileUtil.writeTextToFile(sb2.toString(), "y:/data02/xiaoling/bionlp/train_trigger_out_pool_without_training_data_view1.svm");
	}
	
	public static void main(String[] args){
//		for (String file: new File("test_predictions").list()){
//			if (file.endsWith(".a2.t1")){
//				fix("test_predictions/"+file);
//			}
//		}
//		checkSVMModel("svm_edge_model2");
		// Currently only runs the test() sanity check; the System.exit(-1) below
		// makes everything after it unreachable (kept around for ad-hoc debugging).
		test();
		System.exit(-1);
		// --- unreachable from here on while the exit above is in place ---
		// Pass 1: run the enabled per-document checks over the training data.
		// NOTE(review): the freq normalization below only does anything if checkPrep
		// is re-enabled inside processDocument (it is currently commented out there).
		BioNLPSettings.init(args[0]);
		BioNLPSettings.print();
		Count sum = new Count();
		String path  =BioNLPSettings.getValue("dataPath");
		//			BioNLPSettings.parsePath = BioNLPSettings.trainParsePath;
		for (String filename :  new File(path).list()){
			//						String filename = "9825820.txt";
			if (filename.endsWith(".txt")){
				//					if (r.nextDouble()> 0.1)
				//						continue;
				String filePrefix = filename.replace(".txt", "");
				System.out.println(filename+ " to be processed");
				sum.add(processDocument(path, filePrefix));
			}
		}
		
		// Normalize the raw counts in freq to relative frequencies per governor word.
		for (String key: freq.keySet()){
			float s = 0.0f;
			for (String key2: freq.get(key).keySet()){
				s += freq.get(key).get(key2);
			}
			for (String key2: freq.get(key).keySet()){
				freq.get(key).put(key2, freq.get(key).get(key2)/s);
			}
			System.err.println(key+" = "+freq.get(key));
		}
		
		// Pass 2: apply the collected statistics to the dev data.
		BioNLPSettings.init("dev.conf");
		path  =BioNLPSettings.getValue("dataPath");
		for (String filename :  new File(path).list()){
			//						String filename = "9825820.txt";
			if (filename.endsWith(".txt")){
				//					if (r.nextDouble()> 0.1)
				//						continue;
				String filePrefix = filename.replace(".txt", "");
				System.out.println(filename+ " to be processed");
				sum.add(predictDocument(path, filePrefix));
			}
		}
		System.out.println(sum);
	}
	/**
	 * Dumps the dense weight vector of an SVM-light model file to stderr,
	 * resolving feature ids to names via the "edgeFeatureMap2" mapping file.
	 * @param string path of the SVM-light model file to inspect
	 */
	private static void checkSVMModel(String string) {
		String[] mapLines = FileUtil.getTextFromFile("edgeFeatureMap2").split("\n");
		int len = mapLines.length + 1;            // feature ids start at 1
		String[] features = new String[len];
		if (mapLines.length != 1){
			System.out.println("reading ... "+mapLines.length +" feature map");
			for (String mapLine : mapLines){
				// Each mapping line: "<featureName>\t<featureId>"
				String[] pair = mapLine.split("\t");
				features[Integer.parseInt(pair[1])] = pair[0];
			}
		}

		// Accumulate w = sum over support vectors of alpha_i (one weight per id).
		float[] weights = new float[len];
		for (String modelLine : FileUtil.getTextFromFile(string).split("\n")){
			if (!modelLine.endsWith("#")){
				continue;                          // only support-vector lines end in "#"
			}
			String[] items = modelLine.split(" ");
			double alpha = Double.parseDouble(items[0]);
			// items[1..length-2] are "id:value" pairs; the trailing item is the "#".
			for (int j = 1; j < items.length - 1; j++){
				String[] pair = items[j].split(":");
				weights[Integer.parseInt(pair[0])] += (float) alpha;
			}
		}

		for (int i = 1; i < weights.length; i++){
			System.err.println(features[i] + "\t"+i+"\t"+weights[i]);
		}
	}
	/**
	 * Runs the currently enabled sanity checks over one document.
	 * @param path directory containing the document's annotation files
	 * @param filePrefix document id without file extension
	 * @return a fresh (empty) Count accumulator
	 */
	public static Count processDocument(String path, String filePrefix){
		Count c = new Count();
		BioNLPDocument doc = new BioNLPDocument(path, filePrefix);
		if (doc.valid){
//			checkRegArg(doc);
//			checkInduction(doc);
//			checkPrep(doc);
			checkTSpan(doc);
		}
		return c;
	}
	
	
	
	
	/**
	 * Prints the document's gold event trigger spans (flagging distinct spans that
	 * share a start index), then prints the predicted trigger spans that are
	 * actually referenced by an event line in the prediction file.
	 * @param doc document whose gold spans and predictions are compared
	 */
	private static void checkTSpan(BioNLPDocument doc) {
		System.err.println(doc.fileId+"::");
		HashSet<TSpan> visited = new HashSet<TSpan>();
		for (Argument arg : doc.tspans.values()){
			if (!(arg instanceof Event) || visited.contains(arg.tspan)){
				continue;
			}
			System.err.println("[TRUTH] "+arg.tspan);
			// Flag previously seen spans that start at the same token index.
			for (TSpan seen : visited){
				if (seen.startIdx == arg.tspan.startIdx){
					System.err.println("[SAME] "+seen+"  ==  "+arg.tspan);
				}
			}
			visited.add(arg.tspan);
		}

		// Predicted side: collect T (trigger) lines, then print each one that an
		// E (event) line references — each trigger is reported at most once.
		System.err.println("--------------");
		Hashtable<String, TSpan> pred = new Hashtable<String, TSpan>();
		String[] lines = FileUtil.getTextFromFile(BioNLPSettings.getValue("PRED_DIR")
				+doc.fileId+BioNLPSettings.getValue("EventAnnotationSuffix")).split("\n");
		for (String line : lines){
			if (line.startsWith("T")){
				TSpan span = new TSpan();
				span.parse(line);
				pred.put(span.tid, span);
			}
			else if (line.startsWith("E")){
				// The trigger id sits between the first ':' and the first ' '.
				String triggerId = line.substring(line.indexOf(":")+1, line.indexOf(" "));
				TSpan span = pred.remove(triggerId);
				if (span != null){
					System.err.println("[PRED] "+span);
				}
			}
		}
	}




	private static Hashtable<String, Hashtable<String, Float>> freq = new Hashtable<String, Hashtable<String, Float>>();
	
	/**
	 * Counts, per governor word, how often each "prep_*" dependency relation
	 * attaches to it; the counts accumulate into the shared {@link #freq} table.
	 * @param doc document whose parses are scanned (skipped if it has none)
	 */
	private static void checkPrep(BioNLPDocument doc) {
		if (doc.parses == null){
			System.err.println("[NO_PARSE]"+doc.fileId);
			return;
		}
		for (Parse parse : doc.parses){
			for (Dependency dep : parse.dependencies){
				if (!dep.name.startsWith("prep_") /*&& !dep.name.startsWith("conj_")*/){
					continue;
				}
				// Per-governor count table, created on first sight of this governor.
				Hashtable<String, Float> counts = freq.get(dep.gov);
				if (counts == null){
					counts = new Hashtable<String, Float>();
					freq.put(dep.gov, counts);
				}
				String feature = dep.name/*+":"+dep.dep*/;
				Float current = counts.get(feature);
				counts.put(feature, current == null ? 1.0f : current + 1);
			}
		}
	}
	
	/**
	 * For every chain of prepositional dependencies (gov --prep--> X --prep--> Y)
	 * in the document's parses, compares how often dep's governor vs. dep2's own
	 * governor takes dep2's relation (using the statistics in {@link #freq}) and
	 * prints a suggested reattachment. Output goes to stderr only; the returned
	 * Count is never populated.
	 * @param path directory containing the document's files
	 * @param filePrefix document id without extension
	 * @return a fresh (empty) Count accumulator
	 */
	public static Count predictDocument(String path, String filePrefix){
		Count c = new Count();
		BioNLPDocument doc = new BioNLPDocument(path, filePrefix);
		
		if (!doc.valid || doc.parses == null){
			return c;
		}
		for (Parse parse: doc.parses){
			for (Dependency dep: parse.dependencies){
				if (dep.name.startsWith("prep_")){
					for (Dependency dep2: parse.dependencies){
						if (dep == dep2) continue;
						// dep2 hangs off dep's dependent => chained prepositions.
						if (dep.depIdx == dep2.govIdx && dep2.name.startsWith("prep_")){
							// f1: frequency of dep2's relation under dep's governor.
							Hashtable<String, Float> table = freq.get(dep.gov);
							Float f1 = null;
							if (table!=null)
								f1 = table.get(dep2.name);
							// f2: frequency of dep2's relation under its own governor.
							table = freq.get(dep2.gov);
							Float f2 = null;
							if (table!=null)
								f2 = table.get(dep2.name);
							String orig = dep+" ;;; "+dep2;
							String pred = "";
							if (f1 == null){
								// NOTE(review): both branches below produce the identical
								// string — the split looks like an abandoned distinction.
								if (f2 == null){
									pred = orig +" = ORIG";
								}
								else{
									pred = orig + " = ORIG";
								}
							}else{
								if (f2 == null){
									// Suggest reattaching dep2 under dep's governor.
									pred = dep.name+"("+dep.gov+", "+dep.dep+")"+" ;;; "+dep2.name+"("+dep.gov+", "+dep2.dep+")";
								}
								else{
									if (f1 > f2){
										pred = dep.name+"("+dep.gov+", "+dep.dep+")"+" ;;; "+dep2.name+"("+dep.gov+", "+dep2.dep+")";
									}
									else{
										// dep2's own governor is at least as likely: keep original.
										pred = orig + " = ORIG";
									}
								}
							}
							System.err.println("[SCORE] "+doc.fileId+" : "+ f1 +" , "+f2);
							System.err.println("[ORIG] "+ orig);
							System.err.println("[PRED] "+ pred);
						}
					}
				}
			}
		}
		return c;
	}
	
	/**
	 * Prints every argument of each event whose trigger text is exactly "levels".
	 * @param doc document whose events are inspected
	 */
	private static void checkInduction(BioNLPDocument doc) {
		for (Event event : doc.events.values()){
			if (!event.tspan.text.equals("levels")){
				continue;
			}
			for (Argument arg : event.args){
				System.err.println("[INDUCTION] "+doc.fileId+"\t"+event.eid+":"+event.eventType+"\t"+arg.getId()+":"+arg.tspan.text);
			}
		}
	}
	
	/**
	 * check if a reg can take another reg as argument:
	 * prints every regulation-type event that has a regulation-type argument.
	 * @param doc document whose events are inspected
	 */
	private static void checkRegArg(BioNLPDocument doc) {
		for (Event event : doc.events.values()){
			// "egulation" suffix matches Regulation / Positive_regulation / Negative_regulation.
			if (!event.eventType.toString().endsWith("egulation")){
				continue;
			}
			for (Argument arg : event.args){
				if (arg.tspan.textType.toString().endsWith("egulation")){
					System.err.println("[REG_HAS_REG] "+doc.fileId+"\t"+event.eid+":"+event.tspan.text+"\t"+arg.getId()+":"+arg.tspan.text);
				}
			}
		}
	}
	
	/**
	 * De-duplicates event lines in a BioNLP .a2-style prediction file: when a
	 * later event (E) line repeats an earlier line's description, the later id is
	 * rewritten to the earlier one everywhere it is referenced. Writes the result
	 * to filename+".tmp".
	 * @param filename prediction file to repair
	 */
	public static void fix(String filename){
		String text = FileUtil.getTextFromFile(filename);
		String[] sublines = text.split("\n");
		StringBuilder sb = new StringBuilder();
		// NOTE(review): eventMap is never populated, so the remap loop below is a
		// no-op; kept for behavioral parity.
		Hashtable<Event, Integer> eventMap = new Hashtable<Event,Integer>();
		// ??
		ArrayList<String> toReplace = new ArrayList<String>(), replacement = new ArrayList<String>();
		for (int j = 0; j < sublines.length; j++){
			if (sublines[j].startsWith("T")){
				// Trigger lines pass through untouched.
				sb.append(sublines[j]+"\n");
				continue;
			}
			if (sublines[j].trim().length()!=0){
				// PERF: convert the buffer once per iteration — the original called
				// sb.toString() three separate times here, re-copying the whole
				// buffer on each call (accidental O(n^2)).
				String seen = sb.toString();
				// Match on the description only (text after the id's trailing tab).
				int idx = seen.indexOf(sublines[j].substring(sublines[j].indexOf("\t")));
				if (idx == -1){
					// previous text does not contain the same event description
					sb.append(sublines[j]+"\n");
				}
				else{
					// find the id (trueId) of the previous same event description
					// replace all the following ids (falseId) with (trueId) 
					int startIdx = seen.indexOf("\n", idx -6);
					int trueId = Integer.parseInt(seen.substring(startIdx+2, idx));
					int falseId = Integer.parseInt(sublines[j].substring(1, sublines[j].indexOf("\t")));
					for (Entry<Event, Integer> entry: eventMap.entrySet()){
						if (entry.getValue() == falseId){
							eventMap.put(entry.getKey(), trueId);
							break;
						}
					}
					toReplace.add("E"+falseId);
					replacement.add("E"+trueId);
				}
			}
		}
		// Collapse blank lines, then apply the recorded id substitutions; the " "
		// and "\n" suffixes keep e.g. "E1" from matching inside "E12".
		text = sb.toString().replaceAll("\n+", "\n")+"\n";
		for (int i = 0; i < toReplace.size(); i++){
			System.err.println(filename+" replacing .."+toReplace.get(i)+"  "+ replacement.get(i));
			text = text.replace(toReplace.get(i)+" ", replacement.get(i)+" ");
			text = text.replace(toReplace.get(i)+"\n", replacement.get(i)+"\n");
		}
		FileUtil.writeTextToFile(text.substring(0, text.length()-1), filename+".tmp");
	}
	
	
	/**
	 * Compares gold trigger spans (.a2) against the Turku system's predictions
	 * (.a2.t1) for each dev document, printing spurious ([WRONG]) and missed
	 * ([MISSED]) spans plus per-document match counts.
	 */
	public static void checkUTurkuTrigger(){
		Count sum = new Count();
		String path = "data/BioNLP09/dev/";
		String path2 = "uturku/TurkuEventExtractionSystem-1.0/output2/geniaformat/";
		for (String filename : new File(path).list()){
			if (!filename.endsWith(".txt")){
				continue;
			}
			String prefix = filename.replace(".txt", "");
			BioNLPDocument doc = new BioNLPDocument(path, prefix);
			// Collect T (trigger) lines from the gold and predicted annotation files.
			ArrayList<TSpan> truth = new ArrayList<TSpan>();
			for (String line : FileUtil.getTextFromFile(path + prefix +".a2").split("\n")){
				if (line.startsWith("T")){
					TSpan span = new TSpan();
					span.parse(line);
					truth.add(span);
				}
			}
			ArrayList<TSpan> predictions = new ArrayList<TSpan>();
			for (String line : FileUtil.getTextFromFile(path2+prefix+".a2.t1").split("\n")){
				if (line.startsWith("T")){
					TSpan span = new TSpan();
					span.parse(line);
					predictions.add(span);
				}
			}
			// Greedy matching: each gold span may be claimed by at most one prediction.
			HashSet<TSpan> visited = new HashSet<TSpan>();
			HashSet<TSpan> hit = new HashSet<TSpan>();
			int num = 0;
			for (TSpan gold : truth){
				for (TSpan predicted : predictions){
					if (!visited.contains(gold) && predicted.matchTruth(gold, doc)){
						num++;
						visited.add(gold);
						hit.add(predicted);
						break;
					}
				}
			}
			// Predictions that matched nothing are spurious.
			for (TSpan predicted : predictions){
				if (!hit.contains(predicted)){
					System.err.println("[WRONG]"+"\t"+prefix+"\t"+predicted);
				}
			}
			// Gold spans never claimed are misses.
			for (TSpan gold : truth){
				if (!visited.contains(gold)){
					System.err.println("[MISSED]\t"+prefix+"\t"+gold);
				}
			}
			System.err.println(prefix + "\t" + num +"\t"+ truth.size()+ "\t"+predictions.size());
		}
		System.out.println(sum);
	}
}
