package dacp.etl.kafka.hdfs.tools.writet;

import java.io.BufferedWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;
import org.apache.kafka.connect.sink.SinkRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import io.confluent.connect.hdfs.RecordWriter;

/**
 * Supplies {@link RecordWriter} instances that write each {@link SinkRecord}'s
 * value (via {@code toString()}) as one newline-terminated line to an HDFS file.
 */
public class RecordWriterProvider {
	private static final Logger log = LoggerFactory.getLogger(RecordWriterProvider.class);

	/** File extension used for files produced by this provider. */
	private static final String EXTENSION = ".AVL";

	/** Separator written after each record's string form. */
	private static final String LINE_SEPARATOR = "\n";

	/** Returns the file extension appended to output files. */
	public String getExtension() {
		return EXTENSION;
	}

	/**
	 * Creates a writer that appends one line per record to the file at {@code fileName},
	 * creating (and overwriting) the file on the file system resolved from {@code conf}.
	 *
	 * @param conf     Hadoop configuration used to resolve the target {@code FileSystem}
	 * @param fileName fully qualified path of the file to create
	 * @param record   the first record for this writer; unused here but part of the
	 *                 provider contract
	 * @return a {@link RecordWriter} whose {@code close()} flushes and closes the
	 *         underlying output stream
	 * @throws IOException if the file system cannot be resolved or the file cannot be created
	 */
	public RecordWriter<SinkRecord> getRecordWriter(Configuration conf, String fileName, SinkRecord record)
			throws IOException {

		Path path = new Path(fileName);
		final FSDataOutputStream out = path.getFileSystem(conf).create(path);
		// Fix: pin the charset to UTF-8. The single-argument OutputStreamWriter
		// constructor uses the JVM's platform-default charset, which varies by host
		// and makes output non-portable.
		final BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(out, StandardCharsets.UTF_8));

		// Parameterized logging avoids eager string concatenation.
		log.info("getRecordWriter: {}", path);

		return new RecordWriter<SinkRecord>() {
			@Override
			public void write(SinkRecord record) throws IOException {
				// NOTE(review): record.value() may be null (e.g. tombstone records),
				// which would NPE here — confirm upstream guarantees non-null values.
				writer.write(record.value().toString() + LINE_SEPARATOR);
			}

			@Override
			public void close() throws IOException {
				// Closing the BufferedWriter flushes and closes the wrapped FSDataOutputStream.
				writer.close();
			}
		};
	}
}
