package com.iflyun.LogsCustmerToHDFS.utils;

import java.io.ByteArrayInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.nio.charset.StandardCharsets;

import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * 
 * @ClassName:  HdfsUtils   
 * @Description:hdfs操作工具类
 * @author: wenquxing  
 * @date:   2017年8月25日 上午9:56:20   
 *
 */
public class HdfsUtils {

	private static final Logger logger = LoggerFactory.getLogger(HdfsUtils.class);

	/** Utility class — no instances. */
	private HdfsUtils() {
	}

	/**
	 * Appends {@code str} (followed by a "," separator) to the HDFS file
	 * {@code url}/log/{folder}/{day}.txt, creating the file first if it does
	 * not exist yet.
	 *
	 * @param str    payload to append; a trailing "," is added after it
	 * @param url    HDFS base URL, e.g. {@code hdfs://host:port}
	 * @param day    date string used as the file name (without extension)
	 * @param folder sub-directory under {@code /log}
	 * @throws FileNotFoundException if the path cannot be opened
	 * @throws IOException           if the HDFS interaction fails
	 */
	public static void createOrAppend(String str, String url, String day, String folder)
			throws FileNotFoundException, IOException {
		long start = System.currentTimeMillis();
		// NOTE(review): credentials via system properties kept from the original
		// code; consider moving them to external, secured configuration.
		System.setProperty("HADOOP_USER_NAME", "root");
		System.setProperty("HADOOP_USER_PASSWORD", "xflc@2015");
		String hdfsPath = url + "/log/" + folder + "/" + day + ".txt";
		Path path = new Path(hdfsPath);

		Configuration conf = new Configuration();
		// Allow appends to succeed on small clusters where a failed datanode
		// cannot be replaced during pipeline recovery.
		conf.setBoolean("dfs.client.block.write.replace-datanode-on-failure.enable", true);
		conf.set("dfs.client.block.write.replace-datanode-on-failure.policy", "NEVER");
		conf.setBoolean("dfs.support.append", true);
		conf.setBoolean("dfs.permissions", false);

		// try-with-resources closes the FileSystem even when create/append/copy
		// throws (the original leaked it on any exception before fs.close()).
		try (FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf)) {
			if (!fs.exists(path)) {
				fs.create(path).close();
			}
			// StandardCharsets.UTF_8 avoids the charset-name lookup of the
			// String overload used originally.
			InputStream in = new ByteArrayInputStream((str + ",").getBytes(StandardCharsets.UTF_8));
			FSDataOutputStream out = fs.append(path);
			// copyBytes(..., true) closes both streams when it finishes.
			IOUtils.copyBytes(in, out, 4096, true);
		}
		// Timing is diagnostic info, not an error (original logged at ERROR with
		// string concatenation); use a parameterized INFO message instead.
		logger.info("createOrAppend took {} ms", System.currentTimeMillis() - start);
	}

	/**
	 * Recursively deletes {@code url + dir} from HDFS.
	 *
	 * @param url HDFS base URL
	 * @param dir path relative to {@code url}; blank input is rejected
	 * @return {@code true} if HDFS reports the path was deleted; {@code false}
	 *         for blank input or when the delete did not take effect
	 * @throws IOException if the HDFS interaction fails
	 */
	public static boolean deleteDir(String url, String dir) throws IOException {
		if (StringUtils.isBlank(dir)) {
			return false;
		}
		String target = url + dir;
		Configuration conf = new Configuration();
		try (FileSystem fs = FileSystem.get(URI.create(target), conf)) {
			// Propagate the actual delete result — the original ignored it and
			// always returned true, hiding failed deletes from callers.
			return fs.delete(new Path(target), true);
		}
	}

	/**
	 * Lists the files and directories under a fixed HDFS path.
	 *
	 * @return the directory entries of the hard-coded path
	 * @throws FileNotFoundException if the path does not exist
	 * @throws IOException           if the HDFS interaction fails
	 */
	@SuppressWarnings("unused")
	private static FileStatus[] getDirectoryFromHdfs() throws FileNotFoundException, IOException {
		System.setProperty("HADOOP_USER_NAME", "root");
		System.setProperty("HADOOP_USER_PASSWORD", "xflc@2015");
		// NOTE(review): hard-coded cluster address and date — parameterize
		// before reusing this beyond ad-hoc debugging.
		String dst = "hdfs://172.16.10.235:8022/log/api/2017-08-14";
		Configuration conf = new Configuration();
		// Closing the FileSystem is safe here because FileStatus entries are
		// plain metadata objects, not live handles into the connection.
		try (FileSystem fs = FileSystem.get(URI.create(dst), conf)) {
			return fs.listStatus(new Path(dst));
		}
	}

}
