package com.tangpian.sna.core.analysis.lda;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import com.tangpian.sna.core.analysis.Analyzer;
import com.tangpian.sna.core.analysis.lda.algorithm.Estimator;
import com.tangpian.sna.core.analysis.lda.algorithm.LDAOption;
import com.tangpian.sna.core.analysis.lda.dao.MappingDao;
import com.tangpian.sna.core.analysis.lda.dao.TopicDao;
import com.tangpian.sna.core.analysis.lda.model.ItemTopic;
import com.tangpian.sna.core.analysis.lda.model.Topic;
import com.tangpian.sna.core.model.Item;
import com.tangpian.sna.core.tokenizer.AnsjTokenizer;
import com.tangpian.sna.core.tokenizer.Tokenizer;
import com.tangpian.sna.core.util.FileUtil;

@Component
public class LdaAnalyzer<T extends Item> implements Analyzer<T> {

	private static final Logger logger = LoggerFactory.getLogger(LdaAnalyzer.class);

	@Autowired
	private TopicDao topicDao;

	@Autowired
	private MappingDao mappingDao;

	/** Working directory for the LDA input/output files; computed in {@link #init()}. */
	private String ldaDir;

	// Default LDA run parameters.
	private String tokenizedFile = "tmp.txt";
	private String modelName = "model-final";

	/**
	 * Number of topics (K).
	 */
	private int topicNumber = 50;

	/**
	 * Dirichlet prior alpha (document-topic distribution).
	 */
	private double alpha = 2;
	/** Dirichlet prior beta (topic-word distribution). */
	private double beta = 0.4;
	/** Number of Gibbs-sampling iterations. */
	private int niters = 100;
	/** Whether to run inference on new data. */
	private boolean inf = false;
	/** Whether to estimate the model from scratch. */
	private boolean est = true;

	private Tokenizer tokenizer = new AnsjTokenizer();

	private LDAOption option = new LDAOption();

	public int getTopicNumber() {
		return topicNumber;
	}

	public void setTopicNumber(int topicNumber) {
		this.topicNumber = topicNumber;
	}

	public double getAlpha() {
		return alpha;
	}

	public void setAlpha(double alpha) {
		this.alpha = alpha;
	}

	public double getBeta() {
		return beta;
	}

	public void setBeta(double beta) {
		this.beta = beta;
	}

	public int getNiters() {
		return niters;
	}

	public void setNiters(int niters) {
		this.niters = niters;
	}

	public boolean isInf() {
		return inf;
	}

	public void setInf(boolean inf) {
		this.inf = inf;
	}

	public boolean isEst() {
		return est;
	}

	public void setEst(boolean est) {
		this.est = est;
	}

	/**
	 * Resolves the working directory and copies the configured parameters into
	 * the {@link LDAOption} consumed by the estimator.
	 */
	private void init() {
		// Use the JVM temp directory directly. The previous approach created a
		// throw-away temp file (never deleted) just to discover its parent
		// directory, and its "/lda" prefix contains a path separator, which is
		// not a valid createTempFile prefix on all platforms.
		String inputDir = System.getProperty("java.io.tmpdir", "/tmp");

		ldaDir = inputDir + "/lda/";

		option.dir = ldaDir;
		option.dfile = tokenizedFile;
		option.est = est;
		option.inf = inf;
		option.modelName = modelName;
		option.niters = niters;
		option.K = topicNumber;
		option.alpha = alpha;
		option.beta = beta;
	}

	/**
	 * Tokenizes the item contents and writes the LDA input file: the first line
	 * is the document count, followed by one tokenized document per line.
	 */
	private void prepareFile(List<T> origins) {
		logger.debug("write lda data file to: {}{}", ldaDir, tokenizedFile);

		List<String> tokenizedContents = new ArrayList<String>(origins.size());
		for (Item item : origins) {
			tokenizedContents.add(tokenizer.segmentation(item.getContent()));
		}

		// Ensure the directory exists. The original called delete() first,
		// which silently fails on a non-empty directory anyway.
		File dir = new File(ldaDir);
		if (!dir.exists() && !dir.mkdirs()) {
			logger.warn("could not create lda directory: {}", ldaDir);
		}

		// Write explicitly as UTF-8: the estimator's result files are read
		// back with "utf-8" in parseResult, while FileWriter would have used
		// the platform default charset (mojibake risk for non-ASCII tokens).
		// try-with-resources closes the writer even if a write fails.
		try (BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(
				new FileOutputStream(ldaDir + tokenizedFile), StandardCharsets.UTF_8))) {
			writer.write(tokenizedContents.size() + "\n");
			for (String content : tokenizedContents) {
				writer.write(content + "\n");
			}
		} catch (IOException e) {
			logger.error("failed to write lda data file: " + ldaDir + tokenizedFile, e);
		}
	}

	/**
	 * Runs the full LDA analysis for a task: prepares the input file, runs
	 * Gibbs-sampling estimation, parses the resulting topic and theta files,
	 * and persists the topics and item-topic mappings.
	 *
	 * @param items  the documents to analyze (one theta row per item is expected)
	 * @param taskId identifier stored on every persisted topic and mapping
	 */
	public void analysis(List<T> items, String taskId) {
		init();
		prepareFile(items);

		Estimator estimator = new Estimator();
		estimator.init(option);
		estimator.estimate();

		LdaResult ldaResult = parseResult(items, taskId);

		topicDao.save(ldaResult.getTopics());
		mappingDao.save(ldaResult.getMappings());
	}

	/**
	 * Parses the estimator's output: the ".twords" file (per-topic keyword
	 * lists — an unindented header line per topic followed by indented keyword
	 * lines) and the ".theta" file (one line per document with space-separated
	 * per-topic probabilities).
	 */
	private LdaResult parseResult(List<T> items, String taskId) {
		String topicFilePath = ldaDir + "/" + modelName + ".twords";
		String mappingFilePath = ldaDir + "/" + modelName + ".theta";

		List<String> topicLines = FileUtil.readFile(topicFilePath, "utf-8");
		List<Topic> topics = new ArrayList<Topic>();
		Topic current = null;

		for (int i = 0; i < topicLines.size(); i++) {
			String line = topicLines.get(i);
			if (!line.startsWith(" ")) {
				// Unindented header line starts a new topic; flush the
				// previous one first.
				if (current != null) {
					topics.add(current);
				}
				current = new Topic();
				current.setTaskId(taskId);
				// NOTE(review): the id is the header's line index, not a
				// 0..K-1 topic ordinal — preserved from the original; confirm
				// downstream consumers expect this.
				current.setId(i);
			} else if (current != null) {
				// Indented keyword line. The null check avoids the NPE the
				// original would throw if a keyword line preceded any header.
				current.addKeyword(line);
			}
		}
		if (current != null) {
			topics.add(current);
		}

		List<String> mappingLines = FileUtil.readFile(mappingFilePath, "utf-8");
		List<ItemTopic> mappings = new ArrayList<ItemTopic>();
		for (int i = 0; i < mappingLines.size(); i++) {
			ItemTopic mapping = new ItemTopic();
			mapping.setTaskId(taskId);
			mapping.setItemId(items.get(i).getId());
			String[] probabilities = mappingLines.get(i).split(" ");
			for (int j = 0; j < probabilities.length; j++) {
				String value = probabilities[j];
				// Tolerate repeated/trailing separators instead of letting
				// parseDouble throw on an empty token.
				if (!value.isEmpty()) {
					mapping.addMapping(j, Double.parseDouble(value));
				}
			}
			mappings.add(mapping);
		}

		return new LdaResult(topics, mappings);
	}
}
