package com.saic.data.util;

import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.security.SecurityUtil;


/**
 * Utility class for HDFS operations over a Kerberos-secured cluster.
 *
 * @author Xinshiyou
 */
public class HDFSUtil {

	/** Legacy direct-NameNode URI; unused internally, kept for external callers. */
	public static final String ipFix = "hdfs://10.32.47.104:9000";
	/** HA nameservice prefix prepended to the relative paths most methods accept. */
	public static final String preFix = "hdfs://nameservice1";
	/** Application home directory inside HDFS. */
	public static final String hdfsHome = "/user/dsqoop/finance_platform/";

	private Configuration conf;
	private FileSystem fs;

	/**
	 * Builds a Kerberos-authenticated HDFS client from the fixed cluster
	 * configuration files under {@code /data/finance}.
	 *
	 * NOTE(review): login/connect failures are only logged, leaving {@link #fs}
	 * null so later calls fail with NullPointerException. Preserved as-is
	 * because existing callers rely on a non-throwing constructor.
	 */
	public HDFSUtil() {
		final String user = "dsqoop@SAIC.COM";
		final String keyPath = "/data/finance/kerberos/dsqoop.keytab";
		conf = new Configuration();
		conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
		conf.set("fs.file.impl", "org.apache.hadoop.fs.LocalFileSystem");
		conf.set("hdfs.keytab.file", keyPath);
		conf.set("hdfs.kerberos.principal", user);
		conf.addResource(new Path("/data/finance/conf/core-site.xml"));
		conf.addResource(new Path("/data/finance/conf/hdfs-site.xml"));
		conf.addResource(new Path("/data/finance/conf/yarn-site.xml"));
		System.setProperty("java.security.krb5.conf", "/data/finance/kerberos/krb5.conf");

		try {
			// Reads principal and keytab from the two configuration keys set above.
			SecurityUtil.login(conf, "hdfs.keytab.file", "hdfs.kerberos.principal");
		} catch (Exception e) {
			pm(this.getClass(), "Kerberos Login Error:");
			e.printStackTrace();
		}

		try {
			fs = FileSystem.get(conf);
		} catch (Exception e) {
			pm(this.getClass(), "Get FileSystem Error:");
			e.printStackTrace();
		}
	}

	/**
	 * Returns the Hadoop configuration backing this client.
	 */
	public Configuration getConf() {
		return this.conf;
	}

	/**
	 * Uploads a local file to HDFS, overwriting any existing destination.
	 *
	 * @param localFile local source file path
	 * @param hdfsPath destination path, resolved against {@link #preFix}
	 * @throws IOException if the source cannot be read or the copy fails
	 */
	public void upFile(String localFile, String hdfsPath) throws IOException {
		hdfsPath = preFix + hdfsPath;
		InputStream in = new BufferedInputStream(new FileInputStream(localFile));
		try {
			// copyBytes(conf) closes both streams itself once it runs.
			IOUtils.copyBytes(in, fs.create(new Path(hdfsPath)), conf);
		} finally {
			// Guards the leak when fs.create() throws before copyBytes starts;
			// closeStream tolerates an already-closed stream.
			IOUtils.closeStream(in);
		}
	}

	/**
	 * Uploads the contents of an already-open stream to HDFS, overwriting any
	 * existing destination. The input stream is closed by the copy.
	 *
	 * @param in source stream (closed on completion)
	 * @param hdfsPath destination path, resolved against {@link #preFix}
	 * @throws IOException if the copy fails
	 */
	public void upFile(InputStream in, String hdfsPath) throws IOException {
		hdfsPath = preFix + hdfsPath;
		try {
			IOUtils.copyBytes(in, fs.create(new Path(hdfsPath)), conf);
		} finally {
			IOUtils.closeStream(in);
		}
	}

	/**
	 * Appends a local file to an existing HDFS file.
	 *
	 * @param localFile local source file path
	 * @param hdfsPath target path, resolved against {@link #preFix}; must exist
	 * @throws IOException if the target does not support append or the copy fails
	 */
	public void appendFile(String localFile, String hdfsPath)
			throws IOException {
		hdfsPath = preFix + hdfsPath;
		// Buffered now, matching upFile; the raw FileInputStream was unbuffered.
		InputStream in = new BufferedInputStream(new FileInputStream(localFile));
		try {
			IOUtils.copyBytes(in, fs.append(new Path(hdfsPath)), conf);
		} finally {
			// Guards the leak when fs.append() throws before copyBytes starts.
			IOUtils.closeStream(in);
		}
	}

	/**
	 * Downloads an HDFS file to the local filesystem, overwriting any existing
	 * local file.
	 *
	 * @param hdfsPath source path, resolved against {@link #preFix}
	 * @param localPath local destination file path
	 * @throws IOException if the source cannot be read or the copy fails
	 */
	public void downFile(String hdfsPath, String localPath) throws IOException {
		hdfsPath = preFix + hdfsPath;
		InputStream in = fs.open(new Path(hdfsPath));
		try {
			IOUtils.copyBytes(in, new FileOutputStream(localPath), conf);
		} finally {
			// Guards the leak when the FileOutputStream constructor throws.
			IOUtils.closeStream(in);
		}
	}

	/**
	 * Recursively deletes an HDFS file or directory.
	 *
	 * @param hdfsPath path to delete, resolved against {@link #preFix}
	 * @throws IOException if the delete fails
	 */
	public void delFile(String hdfsPath) throws IOException {
		hdfsPath = preFix + hdfsPath;
		fs.delete(new Path(hdfsPath), true);
	}

	/**
	 * Copies a local file into HDFS via the shared client.
	 *
	 * NOTE(review): unlike the other instance methods this one does NOT apply
	 * {@link #preFix} to {@code hdfspath}; preserved because callers may pass
	 * fully-qualified URIs.
	 *
	 * @param localpath local source file path
	 * @param hdfspath destination path (used verbatim)
	 * @throws IOException if the copy fails
	 */
	public void copyTohdfs(String localpath, String hdfspath) throws IllegalArgumentException, IOException {
		fs.copyFromLocalFile(new Path(localpath), new Path(hdfspath));
	}

	/**
	 * Opens a new HDFS file for writing, overwriting any existing file.
	 *
	 * @param path destination path, resolved against {@link #preFix}
	 * @return an output stream the caller is responsible for closing
	 * @throws IOException if the file cannot be created
	 */
	public OutputStream getOutStream(String path)
			throws IllegalArgumentException, IOException {
		path = preFix + path;
		return fs.create(new Path(path));
	}

	/**
	 * Streams an HDFS file to standard output.
	 *
	 * @param hdfsPath source path, resolved against {@link #preFix}
	 * @throws IOException if the source cannot be read
	 */
	public void ReadFromHDFS(String hdfsPath) throws IOException {
		hdfsPath = preFix + hdfsPath;
		// Reuse the shared client; the old code built a second FileSystem here.
		FSDataInputStream in = fs.open(new Path(hdfsPath));
		try {
			// close=false: the old close=true also closed System.out, silencing
			// all console output (including pm()) for the rest of the JVM run.
			IOUtils.copyBytes(in, System.out, 4096, false);
		} finally {
			IOUtils.closeStream(in);
		}
	}

	/**
	 * Recursively deletes a file or directory using a fresh, default-configured
	 * HDFS connection (no Kerberos setup — intended for unsecured access).
	 *
	 * @param file fully-qualified HDFS URI of the path to delete
	 * @throws IOException on connection or delete failure
	 */
	public static void DeleteHDFSFile(String file) throws IOException {
		Configuration conf = new Configuration();
		// newInstance(): the old code closed the FileSystem.get() JVM-cached
		// instance, which would break every other holder of that cached handle.
		FileSystem fs = FileSystem.newInstance(URI.create(file), conf);
		try {
			fs.delete(new Path(file), true);
		} finally {
			fs.close();
		}
	}

	/**
	 * Uploads a local file to HDFS using a fresh, default-configured
	 * connection. (The old javadoc wrongly said "delete".)
	 *
	 * @param src local source file path
	 * @param dst destination path, resolved against {@link #preFix}
	 * @throws IOException on connection or copy failure
	 */
	public static void UploadLocalFileHDFS(String src, String dst)
			throws IOException {
		dst = preFix + dst;
		Configuration conf = new Configuration();
		// newInstance() for the same cached-handle reason as DeleteHDFSFile.
		FileSystem fs = FileSystem.newInstance(URI.create(dst), conf);
		try {
			fs.copyFromLocalFile(new Path(src), new Path(dst));
		} finally {
			fs.close();
		}
	}

	/**
	 * Lists the immediate children of an HDFS directory.
	 *
	 * @param DirFile directory path, resolved against {@link #preFix}
	 * @return the paths of all entries directly under the directory
	 * @throws IOException if the listing fails
	 */
	public List<Path> ListAll(String DirFile) throws IOException {
		// Consistency fix: the old code resolved a shadowing local FileSystem
		// against preFix + DirFile but then listed the un-prefixed path; reuse
		// the shared client against the fully-prefixed path instead.
		FileStatus[] status = fs.listStatus(new Path(preFix + DirFile));
		List<Path> list = new ArrayList<Path>();
		for (FileStatus f : status) {
			list.add(f.getPath());
		}
		return list;
	}

	/**
	 * Logs a timestamped message tagged with the calling class.
	 */
	private void pm(Class<?> cls, String msg) {
		pm(cls.getName(), msg);
	}

	/**
	 * Logs a timestamped message to standard output.
	 */
	private void pm(String cls, String msg) {
		// SimpleDateFormat is not thread-safe, hence a fresh instance per call.
		String data = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date());
		System.out.println("[" + data + "] Print by " + cls + "-->" + msg);
	}
}
