package com.r7data.ht.example;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.log4j.Logger;

import com.r7data.ht.util.CommonUtil;

/**
 * Exercises basic HDFS file operations (mkdir, create, append, upload,
 * delete, rename, read) plus a few write benchmarks, all rooted under
 * {@code <fs.defaultFS>/userDataFolder/hdfsTestFolder}.
 *
 * <p>Not thread-safe: all methods share one {@link FileSystem} handle.
 */
public class HdfsFileContrl {
	static Logger logger = CommonUtil.getLogger(HdfsFileContrl.class);

	/** Base HDFS path under which every test file/directory is created. */
	String basePath;

	/** Hadoop configuration loaded from the default classpath resources. */
	Configuration conf;

	/**
	 * Shared FileSystem handle.
	 *
	 * <p>BUG FIX: {@code FileSystem.get(conf)} returns a JVM-wide cached
	 * instance, so the original code that called {@code fs.close()} inside
	 * each method invalidated the handle for every later call (e.g.
	 * {@code makeDir()} followed by {@code createFile()} failed with
	 * "Filesystem closed"). All methods now reuse this single handle and
	 * only {@link #close()} releases it.
	 */
	FileSystem hfs;

	public HdfsFileContrl() throws Exception {
		conf = new Configuration();
		hfs = FileSystem.get(conf);
		String baseFs = conf.get("fs.defaultFS");
		basePath = baseFs + "/userDataFolder/hdfsTestFolder";
	}

	/**
	 * Releases the shared FileSystem handle. Call once, after all other
	 * operations have finished.
	 *
	 * @throws IOException if the underlying file system fails to close
	 */
	public void close() throws IOException {
		hfs.close();
	}

	/**
	 * Creates the base test directory if it does not exist yet.
	 *
	 * @throws Exception if the existence check or mkdirs call fails
	 */
	public void makeDir() throws Exception {
		Path dirPath = new Path(basePath);
		if (hfs.exists(dirPath)) {
			System.out.println("文件夹存在");
		} else {
			System.out.println("创建文件夹");
			hfs.mkdirs(dirPath);
		}
		// NOTE: intentionally no hfs.close() here -- the handle is shared.
	}

	/**
	 * Creates {@code basePath/testFile} and writes a short UTF-8 text into
	 * it. Any existing file at that path is overwritten.
	 *
	 * @throws IOException if the stream cannot be created or written
	 */
	public void createFile() throws IOException {
		String textFile = basePath + "/testFile";
		String content = "新建文本文件测试";
		// Explicit charset: the platform default would make this non-ASCII
		// content machine-dependent.
		byte[] contentByte = content.getBytes(StandardCharsets.UTF_8);
		Path dstPath = new Path(textFile); // target path
		// try-with-resources closes the stream even when write() throws.
		try (FSDataOutputStream outputStream = hfs.create(dstPath)) {
			outputStream.write(contentByte);
		}
		System.out.println("文件创建成功！");
	}

	/**
	 * Appends a short UTF-8 text to {@code basePath/testFile}.
	 * NOTE(review): append requires the cluster to support it
	 * ({@code dfs.support.append}) -- confirm before enabling.
	 *
	 * @throws IOException if the append stream cannot be opened or written
	 */
	public void appendFile() throws IOException {
		String textFile = basePath + "/testFile";
		String content = "\r\n追加了一行文本\tabcdefghij@#\nover";
		byte[] contentByte = content.getBytes(StandardCharsets.UTF_8);
		Path dstPath = new Path(textFile); // target path
		try (FSDataOutputStream outputStream = hfs.append(dstPath)) {
			outputStream.write(contentByte);
		}
		System.out.println("文件追加成功！");
	}

	/**
	 * Copies a local file into the base HDFS directory and lists the
	 * directory contents afterwards. The local source file is kept (the
	 * delete-source flag is {@code false}).
	 *
	 * @throws IOException if the copy or the listing fails
	 */
	public void uploadFile() throws IOException {
		String oldFile = "D:/testFile/hadoop/jdk-6u45-windows-x64.exe";
		String newFile = basePath;

		Path oldPath = new Path(oldFile); // local source
		Path newPath = new Path(newFile); // HDFS target directory

		System.out.println("正在上传文件...");
		hfs.copyFromLocalFile(false, oldPath, newPath);

		// "fs.defaultFS" replaces the deprecated "fs.default.name" key, for
		// consistency with the constructor.
		System.out.println("Upload to " + conf.get("fs.defaultFS"));
		System.out.println("------------list files------------" + "\n");
		for (FileStatus file : hfs.listStatus(newPath)) {
			System.out.println(file.getPath());
		}
	}

	/**
	 * Deletes {@code basePath/jdk-6u45-windows-x64.exe} immediately.
	 *
	 * <p>BUG FIX: the original called {@code deleteOnExit(path)}, which only
	 * schedules deletion for JVM shutdown (and returns {@code true} merely
	 * when the path was registered), yet printed "delete ok!" as if the file
	 * were already gone. {@code delete(path, false)} removes it now.
	 *
	 * @throws IOException if the delete call fails
	 */
	public void deleteFile() throws IOException {
		String deleteFile = basePath + "/jdk-6u45-windows-x64.exe";
		Path path = new Path(deleteFile);

		boolean isok = hfs.delete(path, false);
		if (isok) {
			System.out.println("delete ok!");
		} else {
			System.out.println("delete failure");
		}
	}

	/**
	 * Renames {@code testFile} to {@code newNameTestFile} inside the base
	 * directory.
	 *
	 * @throws IOException if the rename call fails
	 */
	public void renameFile() throws IOException {
		Path oldPath = new Path(basePath + "/testFile");
		Path newPath = new Path(basePath + "/newNameTestFile");

		boolean isok = hfs.rename(oldPath, newPath);
		if (isok) {
			System.out.println("rename ok!");
		} else {
			System.out.println("rename failure");
		}
	}

	/**
	 * Reads {@code basePath/blockFile} line by line and echoes each line to
	 * stdout with a 1-based line number.
	 *
	 * @throws IOException if the file cannot be opened or read
	 */
	public void readFile() throws IOException {
		Path filePath = new Path(basePath + "/blockFile");

		// try-with-resources replaces the manual IOUtils.closeStream dance
		// and also closes the reader when readLine() throws mid-loop.
		try (FSDataInputStream input = hfs.open(filePath);
				BufferedReader reader = new BufferedReader(
						new InputStreamReader(input, StandardCharsets.UTF_8))) {
			String line;
			long i = 1L;
			while ((line = reader.readLine()) != null) {
				System.out.println("[" + i + "]" + line);
				i++;
			}
		}
	}

	/* ======================== write-benchmark helpers ======================== */

	/** Number of lines/iterations used by the write benchmarks below. */
	int testCount = 99999;

	/**
	 * Returns one long sample line (CRLF-terminated) used to build the
	 * local load-test source file.
	 */
	public String getLine() {
		String text = "ORACLE RAC 下归档模式一般是存储在 ASM 磁盘组里，我们目的是开启归档日志，并按 THREAD 号将归档日志进行本地存储 2.	各节点分别创建归档日志目录"
				+ "mkdir -p /u01/app/archivelog chown -R oracle.oinstall /u01/app/archivelog chmod -R 776 /u01/app/archivelog \r\n";
		return text;
	}

	/** Returns a short sample line without a line terminator. */
	public String getSmallLine() {
		return "12345678910";
	}

	/**
	 * Write benchmark: many tiny writes, calling {@code hflush()} after every
	 * single write to measure the cost of frequent flushing.
	 *
	 * @throws Exception if the stream cannot be created or written
	 */
	public void smallTest() throws Exception {
		Path dstPath = new Path(basePath + "/smallStreamFile"); // target path

		try (FSDataOutputStream outputStream = hfs.create(dstPath)) {
			for (int i = 0; i < testCount; i++) {
				for (int j = 0; j < 100; j++) {
					outputStream.write(getSmallLine().getBytes(StandardCharsets.UTF_8));
					outputStream.hflush();
				}
				outputStream.write("\r\n".getBytes(StandardCharsets.UTF_8));
				outputStream.hflush();
			}
		}
	}

	/**
	 * Write benchmark counterpart of {@link #smallTest()}: each output line
	 * is assembled in memory first, then written with a single
	 * write/hflush pair per iteration.
	 *
	 * @throws Exception if the stream cannot be created or written
	 */
	public void bigTest() throws Exception {
		Path dstPath = new Path(basePath + "/bigStreamFile"); // target path

		try (FSDataOutputStream outputStream = hfs.create(dstPath)) {
			// Loop bounds of 1 preserved from the original experiment setup.
			for (int i = 0; i < 1; i++) {
				StringBuilder builder = new StringBuilder();
				for (int j = 0; j < 1; j++) {
					builder.append(getSmallLine());
				}
				builder.append("\r\n");

				outputStream.write(builder.toString().getBytes(StandardCharsets.UTF_8));
				outputStream.hflush();
			}
		}
	}

	/**
	 * Creates a local text file of {@code testCount} sample lines, used as
	 * the source for {@link #loadTest()}.
	 *
	 * <p>BUG FIX: the original closed the writer chain outside any finally
	 * block, leaking the file handle when a write failed; try-with-resources
	 * flushes and closes it on every path.
	 *
	 * @param path local file-system path to create
	 * @throws Exception if the file cannot be created or written
	 */
	public void createLocalFile(String path) throws Exception {
		// Closing the outermost writer closes the whole chain.
		try (BufferedWriter bufWriter = new BufferedWriter(
				new OutputStreamWriter(new FileOutputStream(path), StandardCharsets.UTF_8))) {
			for (int i = 0; i < testCount; i++) {
				bufWriter.write(getLine());
			}
		}
	}

	/**
	 * HDFS import test: builds a local source file, copies it into the base
	 * directory, then lists the target's contents.
	 *
	 * @throws Exception if file creation, copy, or listing fails
	 */
	public void loadTest() throws Exception {
		String sourcePath = "D:/testFile/hadoop/loadSource.txt";
		createLocalFile(sourcePath);

		Path oldPath = new Path(sourcePath); // local source
		Path newPath = new Path(basePath + "/loadSource.txt"); // HDFS target

		System.out.println("正在上传文件...");
		hfs.copyFromLocalFile(false, oldPath, newPath);

		// "fs.defaultFS" replaces the deprecated "fs.default.name" key.
		System.out.println("Upload to " + conf.get("fs.defaultFS"));
		System.out.println("------------list files------------" + "\n");
		for (FileStatus file : hfs.listStatus(newPath)) {
			System.out.println(file.getPath());
		}
	}

	/**
	 * HDFS block-write test: repeatedly writes the sample line to
	 * {@code basePath/blockFile}, stopping early once 100 MB is exceeded.
	 *
	 * @throws Exception if the file cannot be created or written
	 */
	public void blockTest() throws Exception {
		Path dstPath = new Path(basePath + "/blockFile"); // target path

		String linePath = "D:/testFile/hadoop/line.txt";
		String testLine = CommonUtil.getFileContent(linePath);
		// The file content is then overridden with a fixed line; the read
		// above is kept to preserve the original dependency on line.txt.
		testLine = "aaaaaaaaaa\r\n";

		// create() overwrites by default, so the original exists()/delete()
		// branch was redundant.
		try (FSDataOutputStream outputStream = hfs.create(dstPath)) {
			long totalSize = 0;
			byte[] tmpByte = testLine.getBytes(StandardCharsets.UTF_8); // loop-invariant

			for (int i = 0; i < 10; i++) {
				totalSize += tmpByte.length;
				outputStream.write(tmpByte);

				if (totalSize > (100L * 1024 * 1024)) {
					System.out.println("100 MB writed ... ");
					break;
				}
			}

			System.out.println("write one time ... ");
			outputStream.hflush();
		}

		System.out.println("close finished ... ");
	}

	/**
	 * Driver that runs the basic operations; the commented-out calls can be
	 * enabled individually.
	 *
	 * @throws Exception if any enabled operation fails
	 */
	public void mainTest() throws Exception {
		// create the base directory
		makeDir();

		// create and write a file
		createFile();

		// append to the file
		//appendFile();

		// upload a local file
		//uploadFile();

		// delete a file
		//deleteFile();

		// rename a file
		//renameFile();

		// read a file
		//readFile();
	}

	/** Identity helper, kept for API compatibility. */
	public String getV(String value) {
		return value;
	}

	/**
	 * Stress test: opens many output streams at once, writes one line to
	 * each, then closes them all.
	 *
	 * @throws Exception if any stream fails to open or write
	 */
	public void moreOsTest() throws Exception {
		String linePath = "D:/testFile/hadoop/line.txt";
		String testLine = CommonUtil.getFileContent(linePath);

		List<FSDataOutputStream> outputList = new ArrayList<FSDataOutputStream>();
		try {
			for (int i = 0; i < 9999; i++) {
				Path dstPath = new Path(basePath + "/" + i); // target path
				outputList.add(hfs.create(dstPath));
			}

			for (FSDataOutputStream output : outputList) {
				output.write(testLine.getBytes(StandardCharsets.UTF_8));
			}

			System.out.println("write finished ... ");
		} finally {
			// BUG FIX: close every opened stream even when a write failed,
			// otherwise up to 9999 leases leak on an exception.
			for (FSDataOutputStream output : outputList) {
				IOUtils.closeStream(output);
			}
		}

		System.out.println("close finished ... ");
	}

	public static void main(String[] args) throws Exception {
		logger.info("start ... ");

		long startTime = System.currentTimeMillis();
		/** ============================================================== **/

		HdfsFileContrl con = new HdfsFileContrl();
		try {
			con.blockTest();
		} finally {
			con.close(); // release the shared FileSystem exactly once
		}

		/** ============================================================== **/
		long endTime = System.currentTimeMillis();

		long spendTime = endTime - startTime;

		logger.info("spend time : " + spendTime / 1000 + " s.");

		logger.info("finished ... ");

		System.out.println("finished .... ");
	}

}
