package com.aisafer.fms.storage;
/*
 * NOTE(review): DEAD CODE — the entire class below is commented out and comes
 * from the former com.aisafer.oss.storage package. It references com.aisafer.oss.*
 * types that are not on this package's path (file is com.aisafer.fms.storage),
 * so it cannot be re-enabled by simply uncommenting. If it is no longer needed,
 * delete this block outright — version control preserves the history. Known
 * issues to fix if it is ever revived: typo `getFielSystem`, unused `fs` local
 * in upload(String,String), exceptions rethrown without cause
 * (`new HdfsException(e.getMessage())` drops the stack trace), and the
 * InputStream returned by read(String) leaks its FileSystem handle.
 *
package com.aisafer.oss.storage;

import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.net.URISyntaxException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;

import com.aisafer.oss.config.HdfsConfig;
import com.aisafer.oss.domain.AttachmentE;
import com.aisafer.oss.dro.AuthReq;
import com.aisafer.oss.dro.DownReq;
import com.aisafer.oss.enums.AttachmentStatusEnums;
import com.aisafer.oss.enums.StorageTypeEnums;
import com.aisafer.oss.exception.HdfsException;
import com.aisafer.oss.service.IStorageManager;
import com.aisafer.oss.service.impl.AttachmentService;
import com.aliyun.oss.ServiceException;


@Service("HDFS")
public class Hdfs extends AbstractStorage implements IStorageManager, IHdfs {

	private static final Logger log = LoggerFactory.getLogger(Hdfs.class);

	@Autowired
	private HdfsConfig hdfsConfig;

	@Autowired
	private AttachmentService attachmentService;

	@Override
	@Transactional(propagation = Propagation.REQUIRES_NEW, rollbackFor = Exception.class)
	public void upload(AttachmentE attachmentE, byte[] content) {
		try {
			log.info("hdfsPath:{},attachid:{}", attachmentE.getStorageKey(), attachmentE.getAttachid());
			long begin = System.currentTimeMillis();
			attachmentService.saveAttach(attachmentE, StorageTypeEnums.HDFS,
					AttachmentStatusEnums.SUCCESS);
			this.upload(attachmentE.getStorageKey(), content);
			log.info("hdfs1 time : {}", (System.currentTimeMillis() - begin));
		} catch (Exception e) {
			log.error(e.getMessage(), e);
			throw new HdfsException(e.getMessage());
		}
	}

	@Override
	public byte[] read(AttachmentE atte) {
		try {
			return org.apache.commons.io.IOUtils.toByteArray(read(atte.getStorageKey()));
		} catch (IOException e) {
			log.error(e.getMessage());
		}
		return null;
	}

	@Override
	public String getAuthUrl(AuthReq req) throws ServiceException {
		return null;
	}

	@Override
	public boolean isFileExist(String attachid) {
		return false;
	}

	private static FileSystem getFielSystem(HdfsConfig hdfsConfig)
			throws IOException, InterruptedException, URISyntaxException {
		FileSystem fs = null;
		Configuration conf = getConfig(hdfsConfig);
		fs = FileSystem.get(new URI(hdfsConfig.getHdfsAddress()), conf, hdfsConfig.getUser());
		return fs;
	}

	private static Configuration getConfig(HdfsConfig hdfsConfig) {
		Configuration conf = new Configuration();
		// conf.set("fs.default.name", hdfsConfig.getHdfsAddress());
		return conf;
	}

	@Override
	public void upload(String dst, byte[] contents) {
		FileSystem fs = null;
		FSDataOutputStream outputStream = null;
		try {

			fs = getFielSystem(hdfsConfig);
			Path dstPath = new Path(dst);
			outputStream = fs.create(dstPath);
			outputStream.write(contents);
		} catch (IOException | InterruptedException | URISyntaxException e) {
			log.error(e.getMessage(), e);
			throw new HdfsException(e.getMessage());
		} finally {
			IOUtils.closeStream(outputStream);
			IOUtils.closeStream(fs);
		}
	}

	@Override
	public void upload(String src, String dst) {
		FileSystem fs = null;
		try {
			FileSystem hdfs = getFielSystem(hdfsConfig);
			hdfs.copyFromLocalFile(false, new Path(src), new Path(dst));
		} catch (IOException | InterruptedException | URISyntaxException e) {
			log.error(e.getMessage(), e);
			throw new HdfsException(e.getMessage());
		} finally {
			IOUtils.closeStream(fs);
		}
	}

	@Override
	public boolean rename(String oldName, String newName) {
		FileSystem fs = null;
		try {
			fs = getFielSystem(hdfsConfig);
			Path oldPath = new Path(oldName);
			Path newPath = new Path(newName);
			boolean isok = fs.rename(oldPath, newPath);
			return isok;
		} catch (IOException | InterruptedException | URISyntaxException e) {
			log.error(e.getMessage(), e);
			return false;
		} finally {
			IOUtils.closeStream(fs);
		}
	}

	@Override
	public boolean delete(String filePath) {
		FileSystem fs = null;
		try {
			fs = getFielSystem(hdfsConfig);
			Path path = new Path(filePath);
			boolean isok = fs.deleteOnExit(path);
			return isok;
		} catch (IOException | InterruptedException | URISyntaxException e) {
			log.error(e.getMessage(), e);
			return false;
		} finally {
			IOUtils.closeStream(fs);
		}
	}

	@Override
	public boolean mkdir(String path) {
		FileSystem fs = null;
		try {
			fs = getFielSystem(hdfsConfig);
			Path srcPath = new Path(path);
			boolean isok = fs.mkdirs(srcPath);
			return isok;
		} catch (IOException | InterruptedException | URISyntaxException e) {
			log.error(e.getMessage(), e);
			return false;
		} finally {
			IOUtils.closeStream(fs);
		}
	}

	@Override
	public InputStream read(String filePath) {
		FileSystem fs = null;
		InputStream in = null;
		try {
			fs = getFielSystem(hdfsConfig);
			Path srcPath = new Path(filePath);
			in = fs.open(srcPath);
			return in;
		} catch (IOException | InterruptedException | URISyntaxException e) {
			log.error(e.getMessage(), e);
		}
		return in;
	}

	@Override
	public byte[] readByte(String filePath) {
		FileSystem fs = null;
		InputStream in = null;
		try {
			fs = getFielSystem(hdfsConfig);
			Path srcPath = new Path(filePath);
			in = fs.open(srcPath);
			return org.apache.commons.io.IOUtils.toByteArray(in);
		} catch (IOException | InterruptedException | URISyntaxException e) {
			log.error(e.getMessage(), e);
		} finally {
			IOUtils.closeStream(in);
			IOUtils.closeStream(fs);
		}
		return new byte[] {};
	}

	@Override
	public boolean deleteFile(AttachmentE attachmentE) {
		return false;
	}

	@Override
	public byte[] read(DownReq req) throws ServiceException {

		return null;
	}

}
*/