package zsd.reiview.experiment.crf;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;

import zsd.review.split.IctSpliting;

import com.aliasi.chunk.Chunking;
import com.aliasi.corpus.ListCorpus;
import com.aliasi.corpus.XValidatingObjectCorpus;
import com.aliasi.dict.DictionaryEntry;
import com.aliasi.dict.ExactDictionaryChunker;
import com.aliasi.dict.MapDictionary;
import com.aliasi.dict.TrieDictionary;
import com.aliasi.test.unit.tokenizer.CharacterTokenizerFactoryTest;
import com.aliasi.tokenizer.CharacterTokenizerFactory;
import com.aliasi.tokenizer.TokenizerFactory;
import com.aliasi.util.AbstractExternalizable;
import com.aliasi.util.Arrays;
import com.aliasi.util.Files;

/**
 * @Title: CrfCorpus.java
 * @Package zsd.reiview.experiment.crf
 * @Description: Builds LingPipe chunking corpora (train/test/cross-validation)
 *               from the COAE 2011 task-3 and COAE 2009 task-4 annotation files.
 * @author 朱圣代
 * @date 2011-10-31 下午02:35:08
 * @version V1.0
 */
public class CrfCorpus {

	/*
	 * Shared static state for corpus construction (sentiment dictionary, label
	 * map, word segmenter). See main(String[]) at the bottom for example usage.
	 */

	// Shared sentiment dictionary; initialized in getTask3Corpus(), null until then.
	public static SentiDictionay sentiDictionay;
	// Maps a segmented sentence string to its label lists: index 0 = POS tags,
	// index 1 = "Y"/"N" sentiment-word flags (one entry per token).
	public static Map<String, List<List<String>>> splitedLabel;
	// Single shared word segmenter; NOTE(review): not obviously thread-safe — confirm before concurrent use.
	static IctSpliting ictSpliting = new IctSpliting();

	/** Returns the shared word segmenter instance. */
	public static IctSpliting getSpliting() {
		return ictSpliting;
	}

	/**
	 * No-op constructor; all functionality is exposed through static members and
	 * the nested corpus classes. (The original declared {@code Exception} twice.)
	 *
	 * @throws Exception kept for backward compatibility with existing callers
	 */
	public CrfCorpus() throws Exception {

	}

	/**
	 * Segments the chunking's character sequence and appends token information to
	 * the supplied output lists: the tokens themselves, their POS tags, and their
	 * start/end character offsets (end exclusive).
	 *
	 * NOTE(review): offsets from ictSpliting.tokenFirstPos are paired by index
	 * with tokenList.get(i); if a caller passes a non-empty tokenList the pairing
	 * is misaligned — confirm callers always pass freshly created lists.
	 */
	public static void toBioTagging(Chunking chunking, List<String> tokenList, List<String> tagList, List<Integer> tokenStartList, List<Integer> tokenEndList) {

		// splitToWords also populates the segmenter's POS list and start-position
		// array as side effects; both are read below.
		tokenList.addAll(ictSpliting.splitToWords(chunking.charSequence().toString()));
		tagList.addAll(ictSpliting.getPosList());
		int[] startPos = ictSpliting.tokenFirstPos;
		for (int i = 0; i < startPos.length; i++) {
			tokenStartList.add(startPos[i]);
			// End offset = start + token length.
			tokenEndList.add(startPos[i] + tokenList.get(i).length());
		}
	}

	/**
	 * One labeled sentence from the corpus: category, original sentence, cleaned
	 * sentence, and its (target, phrase, score) annotation pairs. Serialized via
	 * the LingPipe {@link AbstractExternalizable} serialization-proxy pattern.
	 */
	public static class Property implements Serializable {

		private static final long serialVersionUID = -1189974842651334237L;
		public String cat;       // category/id column of the source line
		public String sourSent;  // original (uncleaned) sentence text
		public String sent;      // cleaned sentence used for chunking
		public List<Pair> pairs = new ArrayList<Pair>();

		// Serialization proxy: the externalizer is written in place of this object.
		private Object writeReplace() {
			return new Externalizer(this);
		}

		// Was Externalizer<F>: the type parameter was never used, so it is removed.
		private static class Externalizer extends AbstractExternalizable {

			private final Property mproperty;

			public Externalizer(Property property) {
				mproperty = property;
			}

			public Externalizer() {
				this(null);
			}

			@Override
			public Object read(ObjectInput in) throws IOException, ClassNotFoundException {
				// Field order must mirror writeExternal(): cat, sourSent, sent, pairs.
				Property property = new Property();
				property.cat = in.readUTF();
				property.sourSent = in.readUTF();
				property.sent = in.readUTF();
				@SuppressWarnings("unchecked")
				List<Pair> pairs = (List<Pair>) in.readObject();
				property.pairs = pairs;
				return property;
			}

			@Override
			public void writeExternal(ObjectOutput out) throws IOException {
				out.writeUTF(mproperty.cat);
				out.writeUTF(mproperty.sourSent);
				out.writeUTF(mproperty.sent);
				AbstractExternalizable.compileOrSerialize(mproperty.pairs, out);
			}
		}
	}

	/**
	 * A single (evaluation target, opinion phrase, sentiment score) annotation.
	 * Serialized via the LingPipe {@link AbstractExternalizable}
	 * serialization-proxy pattern.
	 */
	public static class Pair implements Serializable {

		private static final long serialVersionUID = 6568558193402121969L;
		public String target;  // evaluation target text
		public String phrase;  // opinion phrase attached to the target
		public int score;      // sentiment score/polarity label

		// Serialization proxy: the externalizer is written in place of this object.
		private Object writeReplace() {
			return new Externalizer(this);
		}

		// Was Externalizer<F>: the type parameter was never used, so it is removed.
		private static class Externalizer extends AbstractExternalizable {

			private final Pair mpair;

			public Externalizer(Pair pair) {
				mpair = pair;
			}

			public Externalizer() {
				this(null);
			}

			@Override
			public Object read(ObjectInput in) throws IOException, ClassNotFoundException {
				// Field order must mirror writeExternal(): target, phrase, score.
				Pair pair = new Pair();
				pair.target = in.readUTF();
				pair.phrase = in.readUTF();
				pair.score = in.readInt();
				return pair;
			}

			@Override
			public void writeExternal(ObjectOutput out) throws IOException {
				out.writeUTF(mpair.target);
				out.writeUTF(mpair.phrase);
				out.writeInt(mpair.score);
			}
		}
	}

	public static class Task3Corpus {
		// Cached serialized parse result and the raw COAE 2011 task-3 answer file.
		public String mapPath = "myDataBase"+File.separator +"corpus"+File.separator+"task3.map";
		public String task3Source = "myDataBase"+File.separator+"COAE2011Answer"+File.separator+"task3.txt";
		// Tokenizer used to build dictionary chunkers; defaults to per-character tokens.
		TokenizerFactory mtokenizerFactory = CharacterTokenizerFactory.INSTANCE;

		/** Creates a corpus reader with a caller-supplied tokenizer factory. */
		public Task3Corpus(TokenizerFactory tokenizerFactory) {
			mtokenizerFactory = tokenizerFactory;
		}

		/** Creates a corpus reader with the default character tokenizer. */
		public Task3Corpus() {

		}

		/**
		 * Loads the COAE 2011 task-3 answer file and parses each record into a
		 * {@link Property} holding the category, sentence, and its
		 * (target, phrase, score) triples.
		 *
		 * @param path   path to the tab/'?'-delimited answer file (GBK encoded)
		 * @param update true to re-parse the source file and refresh the serialized
		 *               cache at mapPath; false to deserialize the cached list
		 * @return the parsed properties
		 * @throws Exception on I/O or deserialization failure
		 */
		public List<Property> getTask3CorpusFromLocal(String path, boolean update) throws Exception {
			ArrayList<Property> properties = new ArrayList<Property>();
			File mapFile = new File(mapPath);
			if (!update) {
				// Fast path: read the previously cached parse result.
				properties = (ArrayList<Property>) AbstractExternalizable.readObject(mapFile);
				return properties;
			}
			File file = new File(path);
			String all = Files.readFromFile(file, "gbk");
			// Records are introduced by "?D"; normalize record markers to newlines.
			all = all.replaceAll("\\?D", "\nD");
			String[] lines = all.split("\n");
			for (int i = 0; i < lines.length; i++) {
				String line = lines[i];
				String[] splits = line.split("[\t]+");
				if ((splits.length - 2) % 3 != 0) {
					// Field count is off: the line still uses '?' separators — normalize and re-split.
					line = line.replaceAll("\\?F", "\nF");
					line = line.replaceAll("\\?E", "\nE");
					line = line.replaceAll("\\?D", "\nD");
					line = line.replaceAll("\\?", "\t");
					splits = line.split("[\t]+");
				}
				if (splits.length <= 2)
					continue; // no (target, phrase, score) triples on this line
				String sent = splits[1];
				// Strip spaces, dots and slashes so the cleaned text matches annotations.
				sent = sent.replaceAll("[ \\./]+", "");
				sent = sent.replaceAll("[ ]", "");
				sent = sent.replaceAll("[/]+", "");
				Property property = new Property();
				property.cat = splits[0];
				property.sourSent = splits[1];
				property.sent = sent;

				int pairNum = (splits.length - 2) / 3;

				for (int k = 0; k < pairNum; k++) {
					Pair pair = new Pair();
					pair.target = splits[k * 3 + 2].replaceAll("[ /]+", "");
					pair.phrase = splits[k * 3 + 3].replaceAll("[ /]+", "");
					String sentiStr = splits[k * 3 + 4].trim();
					// Defensive CR strip; trim() above normally removes it already.
					// Fixed off-by-one: was length() - 2, dropping one character too many.
					if (sentiStr.endsWith("\r"))
						sentiStr = sentiStr.substring(0, sentiStr.length() - 1);
					pair.score = Integer.parseInt(sentiStr);
					property.pairs.add(pair);
				}

				properties.add(property);
			}

			// Refresh the cache consumed by the fast path above.
			AbstractExternalizable.serializeTo(properties, mapFile);
			return properties;
		}

		// Serialized-corpus output paths by tagging mode: 0 = targets only, 1 = phrases only, 2 = both.
		String[] corpusPath = { "G:\\1\\corpus\\task3.target", "G:\\1\\corpus\\task3.phrase", "G:\\1\\corpus\\task3.both" };

		/**
		 * Builds a cross-validating chunking corpus from the task-3 answers,
		 * filtered by category prefix, keeping every split-th sentence up to maxNum.
		 * Also populates the static splitedLabel map with POS and sentiment labels.
		 *
		 * @param split     keep every split-th matching sentence
		 * @param maxNum    maximum number of sentences to add
		 * @param type      category prefix filter (e.g. "D")
		 * @param folds     number of cross-validation folds
		 * @param tagTarget chunk annotated targets with chunk type "T"
		 * @param tagPhrase chunk annotated opinion phrases with chunk type "P"
		 * @param update    false to deserialize a previously built corpus instead
		 */
		public XValidatingObjectCorpus<Chunking> getTask3Corpus(int split, int maxNum, String type, int folds, boolean tagTarget, boolean tagPhrase, boolean update) throws Exception {
			XValidatingObjectCorpus<Chunking> xValidatingObjectCorpus = new XValidatingObjectCorpus<Chunking>(folds);
			splitedLabel = new HashMap<String, List<List<String>>>(8192);
			sentiDictionay = SentiDictionay.getInstance();

			String path;
			if (tagPhrase && tagTarget)
				path = corpusPath[2];
			else if (tagTarget) {
				path = corpusPath[0];
			} else if (tagPhrase) {
				path = corpusPath[1];
			} else {
				// Nothing to tag: return the empty corpus.
				return xValidatingObjectCorpus;
			}

			File outfile = new File(path);
			if (!update)
				return (XValidatingObjectCorpus<Chunking>) AbstractExternalizable.readObject(outfile);

			List<Property> properties = getTask3CorpusFromLocal(task3Source, false);
			int couter = 0;
			int start = 0;
			for (Property property : properties) {
				if (!property.cat.startsWith(type))
					continue;
				// Keep only every split-th matching sentence.
				if (start % split != 0) {
					start++;
					continue;
				}
				start++;
				String sent = property.sent;

				ictSpliting.splitToWords(sent);
				List<String> tokenList = ictSpliting.getWordList();
				// Rejoin tokens; StringBuilder replaces the original O(n^2) concatenation.
				StringBuilder joined = new StringBuilder(sent.length());
				for (int j = 0; j < tokenList.size(); j++) {
					joined.append(tokenList.get(j));
				}
				String tokenString = joined.toString();

				// Skip sentences the segmenter does not reproduce verbatim; otherwise
				// chunk offsets would not align with the label lists. (The original
				// also compared lengths, which equals() already implies.)
				if (!tokenString.equals(sent)) {
					continue;
				}

				property.sent = tokenString;
				List<String> posList = ictSpliting.getPosList();
				List<String> isSentiList = new ArrayList<String>();
				List<List<String>> labels = new ArrayList<List<String>>();

				if (posList.size() != tokenList.size()) {
					System.err.println("分词后      " + tokenString);
				}
				labels.add(posList);
				for (int m = 0; m < tokenList.size(); m++) {
					isSentiList.add(isSenti(tokenList.get(m)));
				}
				labels.add(isSentiList);
				splitedLabel.put(tokenString, labels);

				// Tag the sentence by exact dictionary lookup of its annotations.
				MapDictionary<String> dic = new MapDictionary<String>();
				for (Pair pair : property.pairs) {
					if (tagTarget)
						dic.addEntry(new DictionaryEntry<String>(pair.target, "T", pair.score));
					if (tagPhrase)
						dic.addEntry(new DictionaryEntry<String>(pair.phrase, "P", pair.score));
				}
				ExactDictionaryChunker exactDictionaryChunker = new ExactDictionaryChunker(dic, mtokenizerFactory);
				xValidatingObjectCorpus.handle(exactDictionaryChunker.chunk(property.sent));
				couter++;
				if (couter >= maxNum)
					break;
			}
			return xValidatingObjectCorpus;

		}

		/**
		 * Builds a list corpus of TEST chunkings from the task-3 answers, filtered
		 * by category prefix, keeping every split-th sentence up to maxNum.
		 *
		 * @param split     keep every split-th matching sentence
		 * @param maxNum    maximum number of sentences to add
		 * @param type      category prefix filter (e.g. "D")
		 * @param tagTarget chunk annotated targets with chunk type "T"
		 * @param tagPhrase chunk annotated opinion phrases with chunk type "P"
		 */
		public ListCorpus<Chunking> getTestTask3Corpus(int split, int maxNum, String type, boolean tagTarget, boolean tagPhrase) throws Exception {
			ListCorpus<Chunking> listCorpus = new ListCorpus<Chunking>();

			// Nothing to tag: return the empty corpus. (The original computed an
			// unused output path here; this guard preserves the same early return.)
			if (!tagTarget && !tagPhrase) {
				return listCorpus;
			}

			List<Property> properties = getTask3CorpusFromLocal(task3Source, false);
			int couter = 0;
			int start = 0;
			for (Property property : properties) {
				if (!property.cat.startsWith(type))
					continue;
				// Keep only every split-th matching sentence.
				if (start % split != 0) {
					start++;
					continue;
				}
				start++;
				String sent = property.sent;
				ictSpliting.splitToWords(sent);

				List<String> tokenList = ictSpliting.getWordList();
				// Rejoin tokens; StringBuilder replaces the original O(n^2) concatenation.
				StringBuilder joined = new StringBuilder(sent.length());
				for (int j = 0; j < tokenList.size(); j++) {
					joined.append(tokenList.get(j));
				}
				String tokenString = joined.toString();
				// Skip sentences the segmenter does not reproduce verbatim.
				if (!tokenString.equals(sent)) {
					continue;
				}

				property.sent = tokenString;
				List<String> posList = ictSpliting.getPosList();
				List<String> isSentiList = new ArrayList<String>();
				List<List<String>> labels = new ArrayList<List<String>>();

				if (posList.size() != tokenList.size()) {
					System.err.println("分词后      " + tokenString);
				}
				labels.add(posList);
				for (int m = 0; m < tokenList.size(); m++) {
					isSentiList.add(isSenti(tokenList.get(m)));
				}
				labels.add(isSentiList);
				splitedLabel.put(tokenString, labels);

				// Tag the sentence by exact dictionary lookup of its annotations.
				MapDictionary<String> dic = new MapDictionary<String>();
				for (Pair pair : property.pairs) {
					if (tagTarget)
						dic.addEntry(new DictionaryEntry<String>(pair.target, "T", pair.score));
					if (tagPhrase)
						dic.addEntry(new DictionaryEntry<String>(pair.phrase, "P", pair.score));
				}
				ExactDictionaryChunker exactDictionaryChunker = new ExactDictionaryChunker(dic, mtokenizerFactory);
				listCorpus.addTest(exactDictionaryChunker.chunk(property.sent));
				couter++;
				if (couter >= maxNum)
					break;
			}
			return listCorpus;
		}

		/**
		 * Builds a list corpus of TRAINING chunkings over ALL task-3 sentences
		 * (no category filter), taking every split-th entry.
		 *
		 * @param split     keep every split-th sentence
		 * @param tagTarget chunk annotated targets with chunk type "T"
		 * @param tagPhrase chunk annotated opinion phrases with chunk type "P"
		 */
		public ListCorpus<Chunking> getTask3AllCorpus(int split, boolean tagTarget, boolean tagPhrase) throws Exception {
			ListCorpus<Chunking> listCorpus = new ListCorpus<Chunking>();
			// Nothing to tag: return the empty corpus. (The original computed an
			// unused output path here; this guard preserves the same early return.)
			if (!tagTarget && !tagPhrase) {
				return listCorpus;
			}

			int start = 0;
			List<Property> properties = getTask3CorpusFromLocal(task3Source, false);
			for (Property property : properties) {
				if (start % split != 0) {
					start++;
					continue;
				}
				start++;
				MapDictionary<String> dic = new MapDictionary<String>();
				for (Pair pair : property.pairs) {
					if (tagTarget)
						dic.addEntry(new DictionaryEntry<String>(pair.target, "T", pair.score));
					if (tagPhrase)
						dic.addEntry(new DictionaryEntry<String>(pair.phrase, "P", pair.score));
				}
				ExactDictionaryChunker exactDictionaryChunker = new ExactDictionaryChunker(dic, mtokenizerFactory);
				listCorpus.addTrain(exactDictionaryChunker.chunk(property.sent));
			}
			return listCorpus;
		}

		/**
		 * Builds a cross-validating chunking corpus over ALL task-3 sentences
		 * (no category filter), taking every split-th entry.
		 *
		 * @param split     keep every split-th sentence
		 * @param folds     number of cross-validation folds
		 * @param tagTarget chunk annotated targets with chunk type "T"
		 * @param tagPhrase chunk annotated opinion phrases with chunk type "P"
		 * @param update    false to deserialize a previously built corpus instead
		 */
		public XValidatingObjectCorpus<Chunking> getTask3AllCorpus(int split, int folds, boolean tagTarget, boolean tagPhrase, boolean update) throws Exception {
			XValidatingObjectCorpus<Chunking> corpus = new XValidatingObjectCorpus<Chunking>(folds);

			// Choose the serialized-corpus path for this tagging mode; bail out
			// with the empty corpus when neither chunk type is requested.
			String path;
			if (tagTarget && tagPhrase) {
				path = corpusPath[2];
			} else if (tagTarget) {
				path = corpusPath[0];
			} else if (tagPhrase) {
				path = corpusPath[1];
			} else {
				return corpus;
			}

			if (!update) {
				return (XValidatingObjectCorpus<Chunking>) AbstractExternalizable.readObject(new File(path));
			}

			int index = 0;
			for (Property property : getTask3CorpusFromLocal(task3Source, false)) {
				boolean keep = (index % split == 0);
				index++;
				if (!keep) {
					continue;
				}
				// Tag the sentence by exact dictionary lookup of its annotations.
				MapDictionary<String> dictionary = new MapDictionary<String>();
				for (Pair pair : property.pairs) {
					DictionaryEntry<String> targetEntry = new DictionaryEntry<String>(pair.target, "T", pair.score);
					DictionaryEntry<String> phraseEntry = new DictionaryEntry<String>(pair.phrase, "P", pair.score);
					if (tagTarget) {
						dictionary.addEntry(targetEntry);
					}
					if (tagPhrase) {
						dictionary.addEntry(phraseEntry);
					}
				}
				ExactDictionaryChunker chunker = new ExactDictionaryChunker(dictionary, mtokenizerFactory);
				corpus.handle(chunker.chunk(property.sent));
			}

			return corpus;
		}

		/** Returns the tokenizer factory used to build dictionary chunkers. */
		public TokenizerFactory getMtokenizerFactory() {
			return mtokenizerFactory;
		}

		/** Replaces the tokenizer factory used to build dictionary chunkers. */
		public void setMtokenizerFactory(TokenizerFactory mtokenizerFactory) {
			this.mtokenizerFactory = mtokenizerFactory;
		}
	}

	/**
	 * Reports whether a token overlaps the phrase span [pStart, pEnd): true when
	 * either the token start or the token end falls inside the half-open span.
	 * NOTE(review): tokenEnd is compared with {@code < pEnd}, i.e. treated as a
	 * position inside the span — confirm against the callers' offset convention
	 * (toBioTagging produces exclusive end offsets).
	 *
	 * Package-private (was private) so the predicate can be unit tested; callers
	 * inside this class are unaffected.
	 */
	static boolean containInPhrase(int tokenStart, int tokenEnd, int pStart, int pEnd) {
		return (tokenStart >= pStart && tokenStart < pEnd)
				|| (tokenEnd >= pStart && tokenEnd < pEnd);
	}

	/**
	 * Returns the label map built while constructing a corpus: segmented sentence
	 * -> [POS tag list, "Y"/"N" sentiment-flag list]. NOTE(review): despite the
	 * name, this returns both label layers, not only POS tags.
	 */
	public static Map<String, List<List<String>>> getPosList() {
		return splitedLabel;
	}

	/**
	 * Flags whether token p is a sentiment word: "Y" when the sentiment dictionary
	 * classifies it as category "B" or "G", otherwise "N".
	 * Requires sentiDictionay to have been initialized (see getTask3Corpus).
	 */
	public static String isSenti(String p) {
		String cString = sentiDictionay.findPhraseCat(p);
		// Constant-first equals guards against a null category, which previously
		// would have thrown a NullPointerException.
		return ("B".equals(cString) || "G".equals(cString)) ? "Y" : "N";
	}

	public static class COAE2009TargetCorpus {
		// Cached serialized parse result for the 2009 corpus.
		//public String mapPath = "G:\\1\\corpus\\target2009.map";
		public String mapPath = "myDataBase" + File.separator+"corpus" + File.separator +"target2009.map";
		// Raw COAE 2009 task-4 qrels annotation file.
		//public String coae2009TargetSource = "G:\\1\\2009_Task4_Corpus\\Task4_1.qrels";
		public String coae2009TargetSource = "myDataBase" + File.separator +"2009_Task4_Corpus"+File.separator+"Task4_1.qrels";
		// Tokenizer used to build dictionary chunkers; defaults to per-character tokens.
		TokenizerFactory mtokenizerFactory = CharacterTokenizerFactory.INSTANCE;

		/**
		 * Parses the COAE 2009 task-4 target annotation file into properties of
		 * (sentence, target, score) pairs.
		 *
		 * @param update true to parse the source file; false to deserialize the
		 *               cached map from mapPath
		 * @return parsed properties; lines without usable target pairs are skipped
		 * @throws Exception on I/O or deserialization failure (the redundant
		 *                   ClassNotFoundException clause was removed — it is a
		 *                   subclass of Exception)
		 */
		public List<Property> getHashFromLocal(boolean update) throws Exception {

			ArrayList<Property> properties = new ArrayList<Property>();
			File mapFile = new File(mapPath);
			if (!update) {
				properties = (ArrayList<Property>) AbstractExternalizable.readObject(mapFile);
				return properties;
			}
			File file = new File(coae2009TargetSource);
			String all = Files.readFromFile(file, "gbk");
			// Records are introduced by "?D"; normalize record markers to newlines.
			all = all.replaceAll("\\?D", "\nD");
			String[] lines = all.split("\n");
			for (int i = 0; i < lines.length; i++) {
				String line = lines[i];
				String[] splits = line.split("[\t]+");
				if ((splits.length - 1) % 2 != 0) {
					// Field count is off: the line still uses '?' separators — normalize and re-split.
					line = line.replaceAll("\\?F", "\nF");
					line = line.replaceAll("\\?E", "\nE");
					line = line.replaceAll("\\?D", "\nD");
					line = line.replaceAll("\\?", "\t");
					splits = line.split("[\t]+");
				}

				if (splits.length <= 2)
					continue;
				String sent = splits[0];
				// Strip spaces, dots and slashes so the cleaned text matches annotations.
				sent = sent.replaceAll("[ \\./]+", "");
				sent = sent.replaceAll("[ ]", "");
				sent = sent.replaceAll("[/]+", "");
				Property property = new Property();
				property.sourSent = splits[0];
				property.sent = sent;

				int pairNum = (splits.length - 1) / 2;

				for (int k = 0; k < pairNum; k++) {
					Pair pair = new Pair();
					String target = splits[k * 2 + 1].replaceAll("[ /]+", "");
					if (target.equals("@@"))
						continue; // "@@" marks an empty/placeholder target
					pair.target = target;
					String sentiStr = splits[k * 2 + 2].trim();
					// Defensive CR strip; trim() above normally removes it already.
					// Fixed off-by-one: was length() - 2, dropping one character too many.
					if (sentiStr.endsWith("\r"))
						sentiStr = sentiStr.substring(0, sentiStr.length() - 1);
					pair.score = Integer.parseInt(sentiStr);
					property.pairs.add(pair);
				}

				if (property.pairs.size() == 0)
					continue;
				properties.add(property);
			}
			// NOTE(review): unlike the task-3 loader, the result is intentionally not
			// re-serialized here (the serializeTo call was disabled in the original).
			return properties;
		}

		/**
		 * Builds a cross-validating corpus of target-chunked 2009 sentences,
		 * keeping every split-th sentence up to maxNum corpus entries.
		 *
		 * NOTE(review): the update flag is accepted for interface compatibility but
		 * ignored — the source file is always re-parsed (getHashFromLocal(true)).
		 * The unused File local for mapPath was removed.
		 */
		public XValidatingObjectCorpus<Chunking> get2009TargetCorpus(int split, int maxNum, int folds, boolean update) throws Exception {
			XValidatingObjectCorpus<Chunking> xValidatingObjectCorpus = new XValidatingObjectCorpus<Chunking>(folds);

			List<Property> properties = getHashFromLocal(true);
			int start = 0;
			for (Property property : properties) {
				// Keep only every split-th sentence.
				if (start % split != 0) {
					start++;
					continue;
				}
				start++;
				MapDictionary<String> dic = new MapDictionary<String>();
				for (Pair pair : property.pairs) {
					dic.addEntry(new DictionaryEntry<String>(pair.target, "T", pair.score));
				}
				ExactDictionaryChunker exactDictionaryChunker = new ExactDictionaryChunker(dic, mtokenizerFactory);
				xValidatingObjectCorpus.handle(exactDictionaryChunker.chunk(property.sent));
				if (xValidatingObjectCorpus.size() >= maxNum)
					break;
			}
			return xValidatingObjectCorpus;
		}

		/**
		 * Builds a list corpus of TRAINING chunkings from the 2009 target corpus,
		 * keeping every split-th sentence. Stops once maxNum source sentences have
		 * been visited (note: counts visited entries, not added ones).
		 * The unused File local for mapPath was removed.
		 */
		public ListCorpus<Chunking> get2009TrainTargetCorpus(int split, int maxNum) throws Exception {
			ListCorpus<Chunking> listCorpus = new ListCorpus<Chunking>();

			List<Property> properties = getHashFromLocal(true);
			int start = 0;
			int k = 0;
			for (Property property : properties) {
				k++;
				// Keep only every split-th sentence.
				if (start % split != 0) {
					start++;
					continue;
				}
				start++;
				MapDictionary<String> dic = new MapDictionary<String>();
				for (Pair pair : property.pairs) {
					dic.addEntry(new DictionaryEntry<String>(pair.target, "T", pair.score));
				}
				ExactDictionaryChunker exactDictionaryChunker = new ExactDictionaryChunker(dic, mtokenizerFactory);
				listCorpus.addTrain(exactDictionaryChunker.chunk(property.sent));
				if (k >= maxNum)
					break;
			}
			return listCorpus;
		}
	}

	/**
	 * Copies every chunking held by a cross-validating corpus into a plain list
	 * corpus, registering each one as a TEST case.
	 */
	public static ListCorpus<Chunking> xtoListCorpus(XValidatingObjectCorpus<Chunking> xCorpus) {
		ListCorpus<Chunking> result = new ListCorpus<Chunking>();
		for (Chunking item : xCorpus.getAllItem()) {
			result.addTest(item);
		}
		return result;
	}

	/**
	 * Copies the TEST cases of a list corpus into a fold-less (0-fold)
	 * cross-validating corpus.
	 */
	public static XValidatingObjectCorpus<Chunking> listtoxCorpus(ListCorpus<Chunking> xCorpus) {
		XValidatingObjectCorpus<Chunking> result = new XValidatingObjectCorpus<Chunking>(0);
		for (Chunking item : xCorpus.testCases()) {
			result.handle(item);
		}
		return result;
	}

	/**
	 * Ad-hoc driver: re-parses the COAE 2011 task-3 answers (refreshing the
	 * serialized cache) and then the COAE 2009 target corpus.
	 */
	public static void main(String[] args) throws Exception {
		String task3Path = "myDataBase" + File.separator + "COAE2011Answer" + File.separator + "task3.txt";
		Task3Corpus task3Corpus = new Task3Corpus();
		task3Corpus.getTask3CorpusFromLocal(task3Path, true);

		COAE2009TargetCorpus targetCorpus = new COAE2009TargetCorpus();
		targetCorpus.getHashFromLocal(true);
		targetCorpus.get2009TargetCorpus(1, 10000, 10, true);
	}

}
