package tagger;

import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;

import segmenter.Segmenter;
import edu.stanford.nlp.ling.HasWord;
import edu.stanford.nlp.ling.Sentence;
import edu.stanford.nlp.ling.TaggedWord;
import edu.stanford.nlp.tagger.maxent.MaxentTagger;

/**
 * Singleton wrapper around the Stanford maximum-entropy POS tagger,
 * configured with the Chinese model. Obtain the shared instance via
 * {@link #getInstance()}; thread-safe lazy initialization.
 */
public class Tagger {
	
	// Root directory of the Stanford POS tagger distribution.
	// NOTE(review): hard-coded Windows path — consider externalizing to configuration.
	static final private String PATH_PREFIX = 
			"D:/DH/software/stanford-postagger-full-2012-07-09/";
	
	// Underlying Stanford tagger; loaded exactly once in the private constructor.
	private MaxentTagger tagger;
	static private Tagger instance = null;
	
	/**
	 * Smoke test: segments a Chinese sentence and prints the space-joined tokens.
	 */
	public static void main(String[] args) {
		String testString = "薄熙来自从担任商务部长以来，一直兢兢业业。";
		
		List<String> segmentStringList = Segmenter.getInstance().segment(testString);
		// StringBuilder: no synchronization needed for this local, single-threaded use.
		StringBuilder segmentString = new StringBuilder();
		for (String string : segmentStringList) {
			// append twice instead of string concatenation to avoid a temporary String
			segmentString.append(string).append(' ');
		}
		System.out.println(segmentString.toString().trim());
	}
	
	/**
	 * Returns the underlying Stanford tagger. Never null: construction
	 * fails fast if the model cannot be loaded.
	 */
	public MaxentTagger getTagger() {
		return tagger;
	}
	
	/**
	 * Loads the Chinese tagger model. Fails fast with an unchecked exception
	 * (preserving the cause) rather than swallowing the error and leaving
	 * {@code tagger} null, which would surface later as a confusing NPE.
	 */
	private Tagger() {		
		try {
			tagger = new MaxentTagger(PATH_PREFIX + "models/chinese.tagger");
		} catch (IOException e) {
			throw new IllegalStateException(
					"Failed to load tagger model from " + PATH_PREFIX, e);
		} catch (ClassNotFoundException e) {
			throw new IllegalStateException(
					"Failed to load tagger model from " + PATH_PREFIX, e);
		}
	}
	
	/**
	 * Returns the shared {@code Tagger} instance, creating it on first use.
	 * Synchronized so concurrent first calls cannot construct two instances.
	 */
	static public synchronized Tagger getInstance() {
		if (instance == null) {
			instance = new Tagger();
		}
		return instance;
	}
	
	/**
	 * Tags a single, already-segmented sentence.
	 *
	 * @param wordStrings the sentence as a list of word tokens
	 * @return the corresponding tagged words, in input order
	 */
	public List<TaggedWord> tag(List<String> wordStrings) {
		List<HasWord> words = Sentence.toWordList(wordStrings);
		return tagger.tagSentence(words);
	}
	
	/**
	 * Tokenizes raw text into sentences with the tagger's own tokenizer,
	 * then tags each sentence.
	 *
	 * @param toTag raw input text, possibly containing several sentences
	 * @return one list of tagged words per detected sentence
	 */
	public List<List<TaggedWord>> tag(String toTag) {
		List<List<TaggedWord>> res = new ArrayList<List<TaggedWord>>();
		List<List<HasWord>> sentences = MaxentTagger.
				tokenizeText(new StringReader(toTag));
		for (List<HasWord> sentence : sentences) {
			List<TaggedWord> tSentence = tagger.tagSentence(sentence);
			res.add(tSentence);
		}
		return res;
	}
	
	/**
	 * Segments raw text with the project {@link Segmenter}, then tags the
	 * resulting token list as one sentence.
	 *
	 * @param rawText unsegmented input text
	 * @return tagged words for the segmented text
	 */
	public List<TaggedWord> tagRawText(String rawText) {
		List<String> sentences = Segmenter.getInstance().segment(rawText);
		return tag(sentences);
	}
}
