package crawler;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

import asa.PostBucket;

import parser.ParserAnnotation;
import parser.ParserPipeline;

import corpus.SentPolarityCorpus;

import forum.ForumFactory;
import forum.ForumPost;

/**
 * A {@link Crawler} that replays the sentences of a {@link SentPolarityCorpus}
 * as forum posts. Each call to {@link #crawl()} consumes one corpus domain
 * (in the iteration order of {@code corpus.getCorpusNames()}) and turns every
 * entry's key string into a parsed {@link ForumPost} with one sentence.
 */
public class CorpusCrawler extends Crawler {
	private final SentPolarityCorpus corpus;
	/** Index of the next domain to consume; advanced by each crawl() call. */
	private int domain;
	private final String[] domains;

	/**
	 * Creates a crawler over all domains of the given corpus.
	 *
	 * @param fac    factory used to build posts and sentences
	 * @param pp     parser pipeline used to annotate each corpus line
	 * @param bucket bucket passed through to the {@link Crawler} superclass
	 * @param corpus source of the domain names and their sentence maps
	 */
	public CorpusCrawler(ForumFactory fac, ParserPipeline pp,
			PostBucket bucket, SentPolarityCorpus corpus) {
		super("Corpus", fac, pp, bucket);
		this.corpus = corpus;
		Set<String> corpusNames = corpus.getCorpusNames();
		domains = corpusNames.toArray(new String[0]);
	}

	/**
	 * Consumes the next domain and converts each of its entries into a post.
	 * Callers must check {@link #canCrawl()} first; calling past the last
	 * domain throws {@link ArrayIndexOutOfBoundsException}.
	 *
	 * @return the posts created for this domain, one per corpus entry
	 * @throws IOException if post creation fails
	 */
	@Override
	public ArrayList<ForumPost> crawl() throws IOException {
		// Note: only the entry KEY is used as the sentence text; the map
		// value is ignored here (presumably the polarity label — TODO confirm).
		Map<String, String> corp = corpus.getCorpus(domains[domain++]);
		ArrayList<ForumPost> f = new ArrayList<>(corp.size());

		for (Entry<String, String> e : corp.entrySet()) {
			String line = e.getKey();
			ParserAnnotation pa = pp.make(line);
			// Corpus lines are already single sentences; skip re-detection.
			pa.setSkipSentDetection(true);
			ForumPost fp = fac.makePost("Corpus", pa, "Corpus");
			f.add(fp);
			fac.makeSent(line, fp);
		}
		return f;
	}

	/** @return {@code true} while unconsumed domains remain. */
	@Override
	public boolean canCrawl() {
		return domain < domains.length;
	}

	/** Nothing to release: all state is in-memory. */
	@Override
	public void close() throws Exception {
	}
}
