package org.wangbao.service;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;

import javax.servlet.ServletContext;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.Token;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Field.Index;
import org.apache.lucene.document.Field.Store;
import org.apache.struts2.ServletActionContext;
import org.junit.Test;
import org.wangbao.model.News;
import org.wltea.analyzer.lucene.IKAnalyzer;


import com.opensymphony.xwork2.ActionContext;

/**
 * Utility for converting {@link News} items into Lucene {@link Document}s and
 * for maintaining/querying the sensitive-word list stored under
 * {@code WEB-INF/classes/conf/sensitive.txt}.
 *
 * <p>NOTE(review): the constructor requires an active Struts2
 * {@link ActionContext}, so instances can only be created inside a web request.
 */
public class File2DocumentUtils {

	/** Shared analyzer (IK Chinese analyzer) used when tokenizing content. */
	public static Analyzer analyzer = new IKAnalyzer();

	/** Absolute filesystem path of the sensitive-word list file. */
	String senpath;

	/**
	 * Resolves the sensitive-word file path from the current servlet context.
	 */
	public File2DocumentUtils() {
		ActionContext ac = ActionContext.getContext();

		ServletContext sc = (ServletContext) ac
				.get(ServletActionContext.SERVLET_CONTEXT);

		senpath = sc.getRealPath("/") + "WEB-INF/classes/conf/sensitive.txt";
	}

	/**
	 * Builds a Lucene document from a news item. While indexing, any sensitive
	 * words found in the title or content are recorded in the "sensitive"
	 * field so that matches can be highlighted later; {@code null} properties
	 * are skipped.
	 *
	 * @param news the news item to index
	 * @return the populated document
	 * @throws Exception if the sensitive-word file cannot be read
	 */
	public Document saveDocument(News news) throws Exception {

		Document doc = new Document();

		String title = news.getTitle();
		String content = news.getContent();

		if (news.getId() != null)
			doc.add(new Field("id", news.getId(), Store.YES, Index.NOT_ANALYZED));
		if (news.getTitle() != null)
			doc.add(new Field("title", news.getTitle(), Store.YES, Index.ANALYZED));
		if (news.getContent() != null)
			doc.add(new Field("content", news.getContent(), Store.YES, Index.ANALYZED));
		if (news.getHref() != null)
			doc.add(new Field("href", news.getHref(), Store.YES, Index.NO));
		if (news.getJournalId() != null)
			doc.add(new Field("journal", news.getJournalId(), Store.YES, Index.NOT_ANALYZED));
		if (news.getVersionId() != null)
			doc.add(new Field("version", news.getVersionId(), Store.YES, Index.NOT_ANALYZED));
		if (news.getBlockId() != null)
			doc.add(new Field("block", news.getBlockId(), Store.YES, Index.NOT_ANALYZED));

		List<String> wordgroup = querySensitiveWord();

		if (content != null && title != null) {
			StringBuilder sensitiveWord = new StringBuilder();
			for (String word : wordgroup) {
				if (content.contains(word) || title.contains(word)) {
					sensitiveWord.append(word).append(";");
					// BUGFIX: use the literal replace() — replaceAll() treats the
					// word as a regex, which misbehaves (or throws) on
					// metacharacters such as '.', '*' or '('.
					content = content.replace(word, "");
				}
			}
			// NOTE(review): the stripped 'content' local is never written back to
			// the document — the stored "content" field keeps the original text.
			// Confirm whether that is intentional (highlight-only vs. censoring).
			doc.add(new Field("sensitive", sensitiveWord.toString(), Store.YES, Index.ANALYZED));
		}

		return doc;
	}

	/**
	 * Reads the whole file into a string, one "\n"-terminated line at a time.
	 *
	 * <p>NOTE(review): this uses the platform default charset — existing data
	 * may rely on that (e.g. GBK on Chinese Windows), so it is deliberately
	 * left unchanged; confirm and switch to an explicit charset if possible.
	 *
	 * @param file the file to read
	 * @return the file content with every line terminated by '\n'
	 * @throws RuntimeException wrapping any I/O failure
	 */
	public static String readFileContent(File file) {
		// try-with-resources: the original leaked the reader on every call.
		try (BufferedReader reader = new BufferedReader(new InputStreamReader(
				new FileInputStream(file)))) {
			StringBuffer content = new StringBuffer();

			for (String line = null; (line = reader.readLine()) != null;) {
				content.append(line).append("\n");
			}

			return content.toString();
		} catch (Exception e) {
			throw new RuntimeException(e);
		}
	}

	/** Prints the main stored fields of a document to stdout (debug helper). */
	public static void printDocumentInfo(Document doc) {
		System.out.println("------------------------------");
		System.out.println("title     = " + doc.get("title"));
		System.out.println("content  = " + doc.get("content"));
		System.out.println("block  = " + doc.get("block"));
		System.out.println("href     = " + doc.get("href"));
		System.out.println("sensitive     = " + doc.get("sensitive"));
	}

	/**
	 * Serializes the main stored fields of a document into a buffer.
	 *
	 * <p>NOTE(review): despite the name this does NOT emit valid JSON (no
	 * braces, quotes or separators) — callers appear to rely on the current
	 * format, so it is preserved as-is.
	 *
	 * @param doc the document to render
	 * @return the concatenated field dump
	 */
	public static StringBuffer printJsonInfo(Document doc) {

		StringBuffer sb = new StringBuffer();
		sb.append("title:" + doc.get("title"));
		sb.append("content:" + doc.get("content"));
		sb.append("href:" + doc.get("href"));
		sb.append("sensitive:" + doc.get("sensitive"));
		sb.append("block:" + doc.get("block"));
		return sb;
	}

	/**
	 * Tokenizes the given text with the supplied analyzer and prints each
	 * token (debug helper; uses the legacy pre-Lucene-3.x token API).
	 *
	 * @param analyzer the analyzer to tokenize with
	 * @param text the text to analyze
	 * @throws Exception on tokenization failure
	 */
	public static void analyze(Analyzer analyzer, String text) throws Exception {

		TokenStream tokenStream = analyzer.tokenStream("content", new StringReader(text));
		for (Token token = new Token(); (token = tokenStream.next(token)) != null;) {
			System.out.println("==========word========:" + token);
		}
	}

	// NOTE(review): empty stub — implement or remove.
	public void markSensitiveContent() {

	}

	/**
	 * Appends a word to the sensitive-word file (UTF-8).
	 *
	 * @param word the word to append; a trailing newline is added when missing
	 *             so that each entry occupies its own line
	 */
	public void insertSensitiveWord(String word) {
		File file = new File(senpath);
		// try-with-resources: the original leaked the stream on the exception path.
		try (FileOutputStream fw = new FileOutputStream(file, true)) {
			fw.write(word.getBytes("utf-8"));
			// BUGFIX: terminate the entry with a newline so consecutive inserts
			// do not merge into one line — querySensitiveWord reads one word per
			// line, and merged entries would never match.
			if (!word.endsWith("\n")) {
				fw.write('\n');
			}
		} catch (IOException e) {
			// Best-effort append: failures are logged, not propagated, to keep
			// the original no-throw contract. Consider a real logger here.
			e.printStackTrace();
		}

	}

	// NOTE(review): JUnit test living in production code — move to the test
	// source tree; it also depends on a live Struts2 ActionContext.
	@Test
	public void testInsert() {
		insertSensitiveWord("台独");
		querySensitiveWord();
	}

	/**
	 * Loads the sensitive-word list, one word per line, decoded as UTF-8.
	 *
	 * @return the list of sensitive words (possibly empty)
	 * @throws RuntimeException wrapping any I/O failure
	 */
	public List<String> querySensitiveWord() {

		File file = new File(senpath);
		// try-with-resources: the original leaked the reader on every call.
		try (BufferedReader reader = new BufferedReader(
				new InputStreamReader(new FileInputStream(file), "utf-8"))) {
			List<String> content = new ArrayList<String>();

			for (String line = null; (line = reader.readLine()) != null;) {
				content.add(line);
			}
			return content;
		} catch (Exception e) {
			throw new RuntimeException(e);
		}
	}
}
