package org.wb.keyword;

import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Random;
import java.util.Set;

import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.lucene.analysis.tokenattributes.TermAttribute;
import org.wltea.analyzer.lucene.IKTokenizer;

/**
 * @author 林良益
 * 
 */
/**
 * Tokenizes weibo rows ("id,content") with the IK analyzer, aggregates the
 * ids mentioning each term, and emits hot-keyword frequency, node and edge
 * files for a GEXF-style graph visualization.
 *
 * @author 林良益
 */
public class IKTokenerUtil {

	/**
	 * Splits {@code content} into terms with the IK tokenizer.
	 *
	 * @param content text to tokenize
	 * @return the terms joined by commas, with a trailing comma whenever at
	 *         least one term was produced; empty string on tokenizer failure
	 */
	public String splitstoken(String content) {
		StringBuffer sBuffer = new StringBuffer();
		// NOTE(review): second ctor arg presumably selects IK's smart/max-word
		// segmentation mode — confirm against the IK analyzer docs.
		IKTokenizer tokenizer = new IKTokenizer(new StringReader(content), true);
		try {
			while (tokenizer.incrementToken()) {
				TermAttribute termAtt = tokenizer.getAttribute(TermAttribute.class);
				sBuffer.append(termAtt.term()).append(',');
			}
		} catch (IOException e) {
			// best-effort: return whatever was tokenized before the failure
			e.printStackTrace();
		} finally {
			// BUGFIX: the tokenizer (and its underlying reader) was never closed.
			try {
				tokenizer.close();
			} catch (IOException ignored) {
				// nothing left to release
			}
		}
		return sBuffer.toString();
	}

	/**
	 * Pipeline:
	 * <ol>
	 * <li>reads "wb_content3.txt" (UTF-8, one "id,content" row per line)</li>
	 * <li>strips plain http URLs, tokenizes the content, writes
	 * {@code id,"terms"} rows to work_splits.txt</li>
	 * <li>collects, per term of length &gt;= 2, the distinct ids mentioning it</li>
	 * <li>keeps terms mentioned by more than 100 ids, sorts them by id count
	 * (ascending) and writes work_feqs.txt plus work_nodes.txt/work_edges.txt</li>
	 * </ol>
	 */
	public static void main(String[] args) {
		// term -> comma-joined list of distinct ids whose content contains it
		Map<String, String> termFreqMap = new HashMap<String, String>();
		try {
			List<String> readLines;
			FileInputStream in = new FileInputStream("wb_content3.txt");
			try {
				readLines = IOUtils.readLines(in, "utf-8");
			} finally {
				in.close(); // BUGFIX: input stream was never closed
			}
			StringBuffer sb = new StringBuffer();
			for (String row : readLines) {
				int indexOf = row.indexOf(",");
				if (indexOf < 0) {
					continue; // BUGFIX: a row without a separator used to throw
				}
				String col1 = row.substring(0, indexOf);
				// BUGFIX: original substring(indexOf, length()-1) kept the leading
				// comma in the content and silently dropped its last character.
				String col2 = row.substring(indexOf + 1);
				// BUGFIX: original pattern "(http://[\\w\\/\\.]]*)" contained a
				// stray ']' and so effectively never matched a URL.
				col2 = col2.replaceAll("http://[\\w/.]*", "");
				// 基于内容切词 - tokenize the content
				String splitstoken = new IKTokenerUtil().splitstoken(col2);
				sb.append(col1).append(",\"").append(splitstoken).append("\"\n");
				// 将相同词的id们汇总下 - aggregate the ids per term
				String[] terms = splitstoken.split("\\,");
				for (int i = 0; i < terms.length; i++) {
					if (terms[i].length() < 2) {
						continue; // single characters are too noisy to count
					}
					String val = termFreqMap.get(terms[i]);
					if (StringUtils.isEmpty(val)) {
						val = col1;
					} else if (!("," + val + ",").contains("," + col1 + ",")) {
						// BUGFIX: plain contains() is a substring test, so id "1"
						// wrongly matched "12"; comma-wrap for exact-token matching.
						val += "," + col1;
					}
					termFreqMap.put(terms[i], val);
				}
			}
			// BUGFIX: work_splits.txt used to be re-opened (truncated) once per
			// row, leaking a stream each time and keeping only the last row.
			writeFile("work_splits.txt", sb.toString());

			// keep only terms mentioned by more than 100 distinct ids;
			// record format: term@@@count$$$id1,id2,...
			Set<Entry<String, String>> entrySet = termFreqMap.entrySet();
			List<String> keywordfrqList = new ArrayList<String>();
			for (Entry<String, String> entry : entrySet) {
				String value = entry.getValue();
				int length = value.split(",").length;
				if (length > 100) {
					keywordfrqList.add(entry.getKey() + "@@@" + length + "$$$" + value);
				}
			}

			// id们越多代表，高热词 - the more ids, the hotter the keyword
			Collections.sort(keywordfrqList, new Comparator<String>() {
				@Override
				public int compare(String o1, String o2) {
					// BUGFIX: original compared the character length of the
					// id-list strings; compare the actual id counts instead.
					return idCount(o1) - idCount(o2); // counts are small, no overflow
				}
			});

			// 输出高热词 - dump the hot keywords, one record per line
			StringBuffer freqOut = new StringBuffer();
			for (String row : keywordfrqList) {
				freqOut.append(row).append('\n');
			}
			writeFile("work_feqs.txt", freqOut.toString());

			StringBuffer nodes = new StringBuffer();
			StringBuffer edges = new StringBuffer();
			Random random = new Random(255); // fixed seed keeps the layout reproducible
			int edgeid = 0;
			for (int i = 0; i < keywordfrqList.size(); i++) {
				String row = keywordfrqList.get(i);
				int index1 = row.indexOf("@@@");
				int index2 = row.indexOf("$$$");
				// node size is the id count scaled down; color/position random
				nodes.append(String.format(
						"<node id=\"%s\" label=\"%s\"><viz:size value=\"%s\"/><viz:color b=\"%s\" g=\"%s\" r=\"%s\"/><viz:position x=\"%s\" y=\"%s\" z=\"%s\"/></node>\n",
						i, row.substring(0, index1),
						Integer.parseInt(row.substring(index1 + 3, index2)) / 10,
						random.nextInt(255), random.nextInt(255), random.nextInt(255),
						random.nextInt(1000), random.nextInt(500), 0));
				String[] ids = row.substring(index2 + 3).split("\\,");
				for (int k = i + 1; k < keywordfrqList.size(); k++) {
					String row2 = keywordfrqList.get(k);
					// BUGFIX: comma-wrap for exact id matching; plain contains()
					// let id "12" match "123".
					String otherIds = "," + row2.substring(row2.indexOf("$$$") + 3) + ",";
					for (int j = 0; j < ids.length; j++) {
						if (otherIds.contains("," + ids[j] + ",")) {
							edges.append(String.format("<edge id=\"%s\" source=\"%s\" target=\"%s\"/>\n", edgeid++, i, k));
							// BUGFIX: emit at most one edge per node pair; the
							// original added one edge for every shared id.
							break;
						}
					}
				}
			}
			writeFile("work_nodes.txt", nodes.toString());
			writeFile("work_edges.txt", edges.toString());
		} catch (FileNotFoundException e) {
			e.printStackTrace();
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	/** Extracts the id count encoded between "@@@" and "$$$" in a keyword row. */
	private static int idCount(String row) {
		return Integer.parseInt(row.substring(row.indexOf("@@@") + 3, row.indexOf("$$$")));
	}

	/**
	 * Writes {@code text} to {@code fileName} (platform default encoding, as
	 * the original code did) and closes the stream even on failure.
	 */
	private static void writeFile(String fileName, String text) throws IOException {
		FileOutputStream out = new FileOutputStream(fileName);
		try {
			IOUtils.write(text, out);
		} finally {
			out.close(); // BUGFIX: output streams used to leak
		}
	}
}
