package com.lj.spring.hbase_hadoop;

import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.nio.charset.StandardCharsets;

import javax.annotation.Resource;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.testng.AbstractTestNGSpringContextTests;
import org.testng.Assert;
import org.testng.annotations.AfterSuite;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;

/**
 * {@code @Transactional} is used to make a test method run inside a
 * transaction; it is currently commented out on the class below.
 *
 * These tests pass.
 *
 * @author root
 */

@ContextConfiguration({ "classpath:spring/applicationContext.xml" })
// @Transactional
public class HadoopSpringTestFrame extends AbstractTestNGSpringContextTests {

	/**
	 * Injected by bean name because {@code <hdp:configuration>} registers the
	 * Hadoop configuration under the default name "hadoopConfiguration".
	 * (A plain @Autowired would match by type instead.)
	 */
	@Resource(name = "hadoopConfiguration")
	Configuration hadoopConfig;

	/** Spring application context, injected automatically. */
	@Autowired
	private ApplicationContext ctx;

	/** URI of the HDFS namenode, derived from {@code fs.defaultFS}. */
	URI uri;

	/** Shared FileSystem handle, opened once per class, closed after the suite. */
	FileSystem hadoopFs;

	/**
	 * Opens the shared HDFS FileSystem handle once before any test runs.
	 *
	 * @throws IOException if the FileSystem cannot be obtained
	 */
	@BeforeClass
	public void beforeMethod() throws IOException {
		System.out.println("测试开始");
		uri = URI.create(hadoopConfig.get("fs.defaultFS"));
		hadoopFs = FileSystem.get(uri, hadoopConfig);
	}

	/**
	 * Verifies the Hadoop configuration bean was injected.
	 * The original version silently did nothing when injection failed,
	 * letting the test pass vacuously; now it fails loudly.
	 */
	@Test
	public void testConfiguration() {
		Assert.assertNotNull(hadoopConfig, "hadoopConfiguration bean was not injected");
		System.out.println(hadoopConfig.get("fs.defaultFS"));
	}

	/**
	 * Creates the /test directory on HDFS.
	 *
	 * @throws IOException on HDFS communication failure
	 */
	@Test
	public void testMakeDir() throws IOException {
		boolean created = hadoopFs.mkdirs(new Path("/test"));
		Assert.assertTrue(created);
	}

	/**
	 * Creates a file under /test; HDFS creates missing parent directories
	 * automatically. Ordered after testMakeDir so the side effects on /test
	 * are deterministic regardless of TestNG's default method ordering.
	 *
	 * @throws IOException on HDFS communication failure
	 */
	@Test(dependsOnMethods = "testMakeDir")
	public void testCreateFile() throws IOException {
		Path file = new Path("/test/test.log");
		// try-with-resources guarantees the stream is closed even if write fails
		try (FSDataOutputStream os = hadoopFs.create(file)) {
			os.write("Hello World!".getBytes(StandardCharsets.UTF_8));
			os.flush();
		}

		// Echo the file contents to stdout; the 'true' flag closes the stream.
		InputStream is = hadoopFs.open(file);
		IOUtils.copyBytes(is, System.out, 1024, true);
	}

	/**
	 * Lists everything under /test. Must run before the recursive delete,
	 * otherwise listStatus would throw FileNotFoundException.
	 *
	 * @throws IOException on HDFS communication failure
	 */
	@Test(dependsOnMethods = "testCreateFile")
	public void testGetAllStatusFromDir() throws IOException {
		FileStatus[] statuses = hadoopFs.listStatus(new Path("/test"));
		for (FileStatus status : statuses) {
			System.out.println(status);
			// individual fields are available via getters, e.g. status.getPath()
		}
	}

	/**
	 * Recursively deletes /test (like 'hdfs dfs -rm -r'). Ordered last among
	 * the filesystem tests so it cleans up what the others created.
	 *
	 * @throws IOException on HDFS communication failure
	 */
	@Test(dependsOnMethods = "testGetAllStatusFromDir")
	public void testDeleteFile() throws IOException {
		// recursive=true deletes the directory and everything inside it
		boolean deleted = hadoopFs.delete(new Path("/test"), true);
		Assert.assertTrue(deleted);
	}

	/** Verifies the hbaseTemplate bean is present in the Spring context. */
	@Test
	public void testBean() {
		Assert.assertTrue(ctx.containsBean("hbaseTemplate"));
	}

	/**
	 * Releases the shared FileSystem handle after the whole suite; the
	 * original version leaked it. closeStream swallows close() errors,
	 * which is acceptable for best-effort teardown.
	 */
	@AfterSuite
	public void afterMethod() {
		IOUtils.closeStream(hadoopFs);
		System.out.println("测试结束");
	}

}
