package org.bikelab.se3.index.siren;

import java.io.File;
import java.io.IOException;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Field.Index;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriter.MaxFieldLength;
import org.apache.lucene.store.FSDirectory;
import org.bikelab.se3.index.Se3IndexWriter;
import org.sindice.siren.analysis.TupleAnalyzer;

/**
 * {@link Se3IndexWriter} implementation that builds a SIREn/Lucene index for a
 * single map-reduce task attempt. The index is written to a task-local scratch
 * directory for speed and promoted to the job's permanent output path
 * ({@code <output>/idx}) when {@link #close()} succeeds.
 *
 * <p>Not thread-safe: one instance per task attempt, used from a single thread.
 */
public class SirenLuceneWriter implements Se3IndexWriter {
	/** Lucene writer backing this task's index; non-null between open() and close(). */
	private IndexWriter writer;
	/** Permanent (job output) location of the finished index. */
	private Path perm;
	/** Task-local scratch path the index is built in before promotion. */
	private Path temp;
	private FileSystem fs;

	public SirenLuceneWriter() {
	}

	/**
	 * Opens a fresh index for this task attempt.
	 *
	 * <p>Any index left at the permanent path by a previous (failed) attempt is
	 * deleted so the promotion in {@link #close()} starts from a clean
	 * destination. The index itself is created on local disk via
	 * {@code FileSystem.startLocalOutput} and tokenized with SIREn's
	 * {@link TupleAnalyzer}.
	 *
	 * @param context task context providing the configuration, output path and
	 *                attempt id (used to derive a unique scratch directory)
	 * @throws IOException if the filesystem or the Lucene index cannot be opened
	 */
	public void open(TaskAttemptContext context) throws IOException {
		this.fs = FileSystem.get(context.getConfiguration());
		perm = new Path(FileOutputFormat.getOutputPath(context), "idx");
		// Unique per attempt so speculative/retried tasks never collide.
		temp = new Path("tmp_" + context.getTaskAttemptID().toString());
		fs.delete(perm, true);
		// create=true: always start a brand-new index for this attempt.
		writer = new IndexWriter(FSDirectory.open(new File(fs.startLocalOutput(
				perm, temp).toString())), new TupleAnalyzer(), true,
				MaxFieldLength.UNLIMITED);
	}

	/**
	 * Flushes and closes the index, then copies it from the local scratch
	 * directory to the permanent output path.
	 *
	 * <p>Idempotent: a second call (or a call without a prior successful
	 * {@link #open}) is a no-op instead of a {@code NullPointerException}.
	 * Promotion to the permanent path happens only after the Lucene writer has
	 * closed cleanly, so a corrupt half-written index is never published.
	 *
	 * @throws IOException if closing the index or promoting the output fails
	 */
	public void close() throws IOException {
		if (writer == null) {
			return; // never opened, or already closed
		}
		try {
			writer.close();
		} finally {
			// Mark closed even on failure so a retried close() doesn't
			// re-close a broken writer.
			writer = null;
		}
		fs.completeLocalOutput(perm, temp);
	}

	/**
	 * Adds one document to the index.
	 *
	 * @param key document URL; stored and indexed verbatim (not analyzed, no
	 *            norms) so it can be used for exact lookup
	 * @param val document content; analyzed with the SIREn tuple analyzer but
	 *            not stored
	 * @throws IOException if Lucene fails to add the document
	 * @throws IllegalStateException if called before {@link #open} or after
	 *            {@link #close}
	 */
	public void write(Text key, Text val) throws IOException {
		if (writer == null) {
			throw new IllegalStateException(
					"write() called before open() or after close()");
		}
		final Document doc = new Document();
		doc.add(new Field("url", key.toString(), Store.YES,
				Index.NOT_ANALYZED_NO_NORMS));
		doc.add(new Field("content", val.toString(), Store.NO,
				Field.Index.ANALYZED_NO_NORMS));
		writer.addDocument(doc);
	}
}
