package datamining.algorithms.prefixspan;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.TreeMap;

import settings.Settings;

import datamining.algorithms.Algorithm;
import datamining.miner.chain.CanonicalPruningStep;
import datamining.miner.chain.EmbeddingBasedGenerationStep;
import datamining.miner.chain.Extender;
import datamining.miner.chain.FrequencyPruningStep;
import datamining.miner.chain.GenerationPartialStep;
import datamining.miner.chain.GenerationStep;
import datamining.miner.chain.MiningStep;
import datamining.miner.chain.SearchLatticeNode;
import datamining.miner.enviroment.LocalEnvironment;
import datamining.miner.general.Fragment;
import datamining.miner.general.IntFrequency;
import datamining.miner.general.SequenceDataBase;
import datamining.sequence.Sequence;
import datamining.sequence.SequenceFactory;

/**
 * Driver for the PrefixSpan sequential-pattern-mining algorithm.
 * <p>
 * Builds the sequence database, collects the frequent single-item prefixes as
 * initial search nodes, and wires up the per-thread mining/extension chain.
 */
public class PrefixSpan<ItemType> implements Algorithm<ItemType> {

	// Minimum support threshold; defaults to 1 until initialize() reads the
	// configured value from Settings.
	private IntFrequency minFreq = new IntFrequency(1);
	// Frequent single-item prefixes keyed by item; these become the roots of
	// the search lattice.
	private Map<ItemType, PrefixCode<ItemType>> initials;
	// When true, removing an initial node keeps its backing data.
	private boolean keep = false;

	public PrefixSpan() {
		initials = new TreeMap<ItemType, PrefixCode<ItemType>>();
	}

	/**
	 * Demo entry point: loads the configuration and prints the minimum
	 * support setting.
	 * <p>
	 * NOTE(review): this calls {@code initialize(null, null, settings)}; the
	 * for-each over {@code sequences} inside initialize() will throw a
	 * NullPointerException on a null list — confirm intended usage.
	 *
	 * @param args unused
	 */
	public static void main(String[] args) {
		Settings settings = new Settings("conf/config.properties"); // load configuration
		Settings.parse("conf/config.properties", settings);
		settings.factory = new PrefixSpanSequenceFactory();
		PrefixSpan<Character> prefixSpan = new PrefixSpan<Character>();
		prefixSpan.initialize(null, null, settings);
		System.out.println("-----------------------------------------------------------------------------------------");
		System.out.println("参数设置：\nMinimum support is "
				+ prefixSpan.minFreq);
		System.out.println("-----------------------------------------------------------------------------------------");
	}

	/**
	 * Builds the sequence database, registers the frequent single-item
	 * prefixes as initial search nodes, and creates the shared environment.
	 * <p>
	 * NOTE(review): this method currently always returns an empty collection
	 * — the {@code nodeFragments} map is created but never populated, and the
	 * thread environment fetched below is unused; looks incomplete.
	 *
	 * @param sequences the input sequences (must not be null)
	 * @param factory   sequence factory (currently unused here)
	 * @param settings  mining configuration, supplies the support thresholds
	 * @return the initial fragments (always empty in the current code)
	 */
	@SuppressWarnings("hiding")
	@Override
	public Collection<Fragment> initialize(
			List<Sequence<ItemType>> sequences,
			SequenceFactory<ItemType> factory, Settings settings) {
		// Build the sequence database over the raw input.
		final SequenceDataBase<ItemType> db = new SequenceDataBase<ItemType>(
				sequences, settings);

		minFreq = settings.minFreq;
		initials.clear();

		// Compute the frequent items. The list itself is never read again —
		// TODO confirm whether db.frequentItems() has required side effects.
		final ArrayList<ItemType> frequentItems = new ArrayList<ItemType>();
		frequentItems.addAll(db.frequentItems());

		// Shared environment carrying the settings and the database.
		final LocalEnvironment env = LocalEnvironment.create(settings,
				db, new PThreadEnvFactory());

		// Wrap each input sequence and register its single-item prefixes as
		// candidate initial nodes.
		int index = 0;
		for (final Sequence<ItemType> sequence : sequences) {
			final PrefixSpanSequence<ItemType> wrapped = new PrefixSpanSequence<ItemType>(
					sequence, index, db.getFrequency(sequence));
			index++;
			wrapped.createInitials(initials);
		}

		// Discard prefixes whose support is below the minimum.
		final Iterator<Entry<ItemType, PrefixCode<ItemType>>> it = initials
				.entrySet().iterator();
		while (it.hasNext()) {
			final PrefixCode<ItemType> code = it.next().getValue();
			if (settings.minFreq.compareTo(code.frequency()) > 0) {
				it.remove();
			}
		}

		// Fetch thread environment 0. The value is unused — presumably the
		// call has initialization side effects; verify before removing.
		final PThreadEnvironment threadEnv = (PThreadEnvironment) env
				.getThreadEnv(0);
		// No fragments are produced; the returned collection is empty.
		final Map<ItemType, Fragment> nodeFragments = new HashMap<ItemType, Fragment>();

		return nodeFragments.values();
	}

	/**
	 * Returns an iterator over the surviving initial search nodes. Removal
	 * through the iterator deletes the backing data unless {@code keep} is
	 * set.
	 */
	@Override
	public Iterator<SearchLatticeNode> initialNodes() {
		return new SearchLatticeNodeIterator(initials, !keep);
	}

	/**
	 * Assembles the extension chain for one worker thread.
	 * <p>
	 * Mining chain (outer to inner): canonical pruning → frequency pruning →
	 * generation. Generation chain is headed by right-most extension.
	 *
	 * @param threadIdx index of the worker thread
	 * @return the fully wired extender for that thread
	 */
	@Override
	public Extender getExtender(int threadIdx) {
		final LocalEnvironment<ItemType> env = LocalEnvironment.env();
		final PThreadEnvironment<ItemType> threadEnv = (PThreadEnvironment<ItemType>) env
				.getThreadEnv(threadIdx);
		final PrefixSpanExtender<ItemType> extender = new PrefixSpanExtender<ItemType>(
				threadEnv);

		// Pick the generation strategy: embedding-based counting, or the
		// plain PrefixSpan projection otherwise.
		final GenerationStep<ItemType> gen;
		if (env.embeddingBased) {
			gen = new EmbeddingBasedGenerationStep<ItemType>(extender);
		} else {
			gen = new PrefixSpanGeneration<ItemType>(extender, threadEnv);
			// TODO: handling for the non-embedding case
		}

		// Prune candidates whose frequency is outside [minFreq, maxFreq],
		// then drop non-canonical candidates before they reach generation.
		MiningStep<ItemType> head = new FrequencyPruningStep<ItemType>(gen,
				env.minFreq, env.maxFreq);
		head = new CanonicalPruningStep<ItemType>(head);

		// Prepend right-most extension to the generation chain.
		GenerationPartialStep<ItemType> genHead = new RightMostExtension<ItemType>(
				gen.getLast(), threadEnv);
		gen.setFirst(genHead);

		// Install the assembled mining chain on the extender.
		extender.setFirst(head);

		return extender;
	}

	/**
	 * Iterator over the initial lattice nodes backed by the {@code initials}
	 * map; can optionally delete the underlying data on removal.
	 */
	private class SearchLatticeNodeIterator implements
			Iterator<SearchLatticeNode> {
		final Iterator<Map.Entry<ItemType, PrefixCode<ItemType>>> entries;

		// Whether remove() should also delete the original data.
		final boolean del;

		Map.Entry<ItemType, PrefixCode<ItemType>> current = null;

		SearchLatticeNodeIterator(
				final Map<ItemType, PrefixCode<ItemType>> initials,
				final boolean del) {
			this.entries = initials.entrySet().iterator();
			this.del = del;
		}

		public boolean hasNext() {
			return entries.hasNext();
		}

		public SearchLatticeNode next() {
			current = entries.next();
			return current.getValue();
		}

		public void remove() {
			if (del) {
				// TODO: also delete the original data when removing a node.
			}
			entries.remove();
		}
	}
}
