package com.lvmama.java.rhino.spark.core.hdfs;

import java.io.IOException;
import java.io.Serializable;

import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.log4j.Logger;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.InitializingBean;

import com.lvmama.java.rhino.spark.utils.HdfsUtils;

/**
 * Base class for Spring-managed HDFS writers.
 *
 * <p>Lifecycle: {@link #afterPropertiesSet()} eagerly initializes the shared
 * {@code HdfsUtils} singleton when the bean starts, and {@link #destroy()}
 * closes it when the application context shuts down.
 *
 * <p>Subclasses provide the target table/path via {@link #getTableName()} and
 * the actual serialization logic via {@link #save(FSDataOutputStream, Object)}.
 */
public abstract class HdfsHelper implements IHdfsHelper, Serializable, InitializingBean, DisposableBean {

	private static final long serialVersionUID = 7741636612436499207L;
	private static final Logger LOGGER = Logger.getLogger(HdfsHelper.class);

	/**
	 * Spring shutdown hook: releases the shared HDFS connection held by
	 * {@code HdfsUtils}.
	 */
	@Override
	public void destroy() throws Exception {
		HdfsUtils.getInstance().close();
	}

	/**
	 * Obtains an output stream for this helper's table and delegates the actual
	 * write to the subclass. Any {@link IOException} is logged and swallowed, so
	 * callers never see a failure — this preserves the original best-effort
	 * contract.
	 *
	 * @param obj the object to persist; interpretation is up to the subclass
	 */
	@Override
	public final void save(Object obj) {
		try {
			FSDataOutputStream outputStream = HdfsUtils.getInstance().getFsDataOutputStream(getTableName());
			// NOTE(review): the stream is not flushed/closed here. HdfsUtils may
			// cache streams per table for appending — confirm its lifecycle; if it
			// hands out a fresh stream per call, this leaks until destroy().
			save(outputStream, obj);
		} catch (IOException e) {
			// Log once through log4j with context; printStackTrace() was removed
			// as it bypassed the logging framework and duplicated the report.
			LOGGER.error("Failed to save object to HDFS table [" + getTableName() + "]", e);
		}
	}

	/**
	 * Writes {@code obj} to the supplied HDFS stream.
	 *
	 * @param outputStream open stream positioned at the target table/path
	 * @param obj          the object to serialize and write
	 */
	public abstract void save(FSDataOutputStream outputStream, Object obj);

	/**
	 * @return the HDFS table name (path key) this helper writes to; used to
	 *         look up the output stream in {@code HdfsUtils}
	 */
	protected abstract String getTableName();

	/**
	 * Spring startup hook: touches the {@code HdfsUtils} singleton so connection
	 * failures surface at context startup rather than on first save.
	 */
	@Override
	public void afterPropertiesSet() throws Exception {
		HdfsUtils.getInstance();
	}
}
