package cn.myframe.controller;

import cn.myframe.config.HadoopTemplate;

import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Path;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import java.util.logging.Level;
import java.util.logging.Logger;

/*******************
 *@Describe
 *@author 封神梦
 *@Time 2022/3/21
 *@Company http://com.yiview.com
 ****************/
@Controller
public class TestController {

    private static final Logger LOG = Logger.getLogger(TestController.class.getName());

    /** HDFS namenode URI used when resolving the requested path. */
    private static final String HDFS_URI = "hdfs://localhost:9000";

    @Autowired
    HadoopTemplate hadoop;

    /**
     * Renders the test page.
     *
     * @return the logical view name {@code "test"}
     */
    @RequestMapping(value = "indexPage")
    public String indexData() {
        return "test";
    }

    /**
     * Streams an HDFS file, named by the {@code source} request parameter,
     * to the HTTP response as a binary download.
     *
     * <p>Responds 400 when the parameter is missing or blank, 404 when the
     * file does not exist in HDFS, and logs (without rethrowing) any I/O
     * failure that occurs while copying.
     *
     * @param request  carries the {@code source} parameter (an HDFS path)
     * @param response receives the file bytes as {@code application/octet-stream}
     */
    @RequestMapping("/getResource")
    public void getResource(HttpServletRequest request, HttpServletResponse response) {
        try {
            String path = request.getParameter("source");
            // NOTE(review): "source" is untrusted client input used directly as an
            // HDFS path. Consider restricting it to a whitelisted base directory to
            // prevent arbitrary-file reads.
            if (path == null || path.trim().isEmpty()) {
                response.sendError(HttpServletResponse.SC_BAD_REQUEST, "missing 'source' parameter");
                return;
            }
            if (!hadoop.existFile(path)) {
                // The original empty else-branch returned 200 with an empty body;
                // an explicit 404 tells the client the file is absent.
                response.sendError(HttpServletResponse.SC_NOT_FOUND);
                return;
            }
            // Only commit the download content type once we know the file exists.
            response.setContentType("application/octet-stream");
            FileContext fc = FileContext.getFileContext(URI.create(HDFS_URI));
            // try-with-resources guarantees the HDFS stream is closed even if the
            // copy fails; the original leaked fsInput on any exception. FSDataInputStream
            // is itself an InputStream, so getWrappedStream() is unnecessary.
            try (FSDataInputStream in = fc.open(new Path(path))) {
                OutputStream os = response.getOutputStream();
                // Buffered copy replaces the original one-byte-at-a-time loop.
                byte[] buf = new byte[8192];
                int n;
                while ((n = in.read(buf)) != -1) {
                    os.write(buf, 0, n);
                }
                os.flush();
                // The servlet container owns the response stream; no close() needed here.
            }
        } catch (Exception ex) {
            // Log instead of printStackTrace(); the response may already be
            // partially written, so just record the failure.
            LOG.log(Level.SEVERE, "failed to stream HDFS resource", ex);
        }
    }
}
