package com.imocc.hadoop.spring;

/**
 * Created by Administrator on 2019/2/25.
 */

import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.springframework.context.ApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;

import java.io.IOException;

/**
 * Accesses the HDFS file system via Spring Hadoop.
 *
 * @author ydf
 * @com kt
 * @create 2019-02-25 10:00 AM
 * https://docs.spring.io/spring-hadoop/docs/2.5.0.RELEASE/reference/html/springandhadoop-config.html#configuring-hadoop
 **/
public class SpringHadoopHDFSApp {
    private ApplicationContext ctx;
    private FileSystem fileSystem;

    /**
     * Creates a directory on HDFS.
     *
     * @throws IOException if the filesystem operation fails
     */
    @Test
    public void testMkdirs() throws IOException {
        fileSystem.mkdirs(new Path("/springhdfs/"));
    }

    /**
     * Reads the content of an HDFS file and copies it to standard output.
     *
     * @throws IOException if the file cannot be opened or read
     */
    @Test
    public void testText() throws IOException {
        // try-with-resources guarantees the stream is closed even if copyBytes throws
        // (the original leaked the stream on any exception before in.close())
        try (FSDataInputStream in = fileSystem.open(new Path("/springhdfs/hello.txt"))) {
            IOUtils.copyBytes(in, System.out, 1024);
        }
    }

    /**
     * Loads the Spring context from beans.xml and obtains the FileSystem bean
     * before each test.
     */
    @Before
    public void setUp() {
        ctx = new ClassPathXmlApplicationContext("beans.xml");
        fileSystem = (FileSystem) ctx.getBean("fileSystem");
    }

    /**
     * Releases resources after each test: closes the FileSystem and the Spring
     * context. (The original only nulled the context reference, which leaks it —
     * ClassPathXmlApplicationContext must be closed explicitly.)
     *
     * @throws IOException if closing the filesystem fails
     */
    @After
    public void tearDown() throws IOException {
        // null-guard so a failed setUp() does not cascade into an NPE here
        if (fileSystem != null) {
            fileSystem.close();
            fileSystem = null;
        }
        if (ctx instanceof ClassPathXmlApplicationContext) {
            ((ClassPathXmlApplicationContext) ctx).close();
        }
        ctx = null;
    }
}
