package service.impl;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.http.HttpResponse;
import service.HadoopService;
import utills.JsonUtils;
import utills.ResponseUtils;

import javax.servlet.http.HttpServletResponse;
import java.net.URI;
import java.util.HashMap;
import java.util.Map;

/**
 * @author 杨铭
 * 2022/6/6,20:45
 */
public class HadoopServiceImpl implements HadoopService {

    /** Shared Hadoop configuration used for every HDFS connection made by this service. */
    Configuration conf = new Configuration();

    /**
     * Uploads a local file into a fixed HDFS warehouse directory.
     *
     * @param fileUrl path of the local file to upload
     *        (passed straight to {@code copyFromLocalFile} as the source path)
     */
    @Override
    public void upload(String fileUrl) {
        // Local Hadoop installation dir — required on Windows so the client can
        // locate winutils; note the hard-coded developer path.
        String resourceUrl = "E:\\code\\hadoop-2.7.3";
        // HDFS NameNode endpoint (hard-coded cluster address).
        String url = "hdfs://192.168.142.133:9000";
        System.setProperty("hadoop.home.dir", resourceUrl);
        // try-with-resources closes the FileSystem handle when done; the original
        // code leaked it (FileSystem is Closeable and was never closed).
        try (FileSystem fileSystem = FileSystem.get(URI.create(url), conf)) {
            // Source: local file supplied by the caller.
            Path srcPath = new Path(fileUrl);
            // Destination: fixed Hive warehouse directory on HDFS.
            Path destPath = new Path("/training/hive/warehouse/hive.db/student");
            // Hadoop's copyFromLocalFile performs the actual upload.
            fileSystem.copyFromLocalFile(srcPath, destPath);
        } catch (Exception e) {
            // NOTE(review): failures are only printed — callers cannot detect an
            // upload error. Consider logging via SLF4J and/or rethrowing a
            // domain exception in a future interface revision.
            e.printStackTrace();
        }
    }

    /**
     * Serializes a single {code -> msg} pair to JSON and writes it to the HTTP response.
     *
     * @param response servlet response the JSON is rendered into
     * @param code     used as the JSON object's key
     * @param msg      used as the JSON object's value
     */
    @Override
    public void ajax(HttpServletResponse response, String code, String msg) {
        // NOTE(review): the code value itself is the map key, so the payload is
        // {"<code>": "<msg>"} rather than {"code": ..., "msg": ...} — confirm
        // clients expect this shape; behavior intentionally left unchanged.
        Map<String, String> map = new HashMap<>();
        map.put(code, msg);
        String json = JsonUtils.toJson(map);
        try {
            ResponseUtils.renderJson(response, json);
        } catch (Exception e) {
            // NOTE(review): render failures are swallowed after printing.
            e.printStackTrace();
        }
    }

}
