package com.service.impl;


import com.service.IHadoopService;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import java.io.File;
import java.io.IOException;


@Service
public class HadoopServiceImpl implements IHadoopService {

    private static final Logger logger = LoggerFactory.getLogger(HadoopServiceImpl.class);

    /** Target HDFS directory for uploads, injected from configuration key {@code hdfs.path}. */
    @Value("${hdfs.path}")
    private String hdfsPath;

    /**
     * Uploads a data file and its companion ".ok" marker file to HDFS.
     * <p>
     * Any pre-existing HDFS files with the same names are deleted first.
     * The data file is uploaded before the ".ok" marker so that a consumer
     * watching for the marker never observes it ahead of the data.
     * Both local files are deleted after a successful copy ({@code delSrc=true}).
     * Failures are logged and swallowed; the method never throws.
     *
     * @param sourceFile the local data file to upload
     * @param okFile     the local ".ok" marker file to upload
     */
    @Override
    public void uploadFileToHadoop(File sourceFile, File okFile) {
        // try-with-resources guarantees the FileSystem handle is released.
        // NOTE(review): FileSystem.get may hand back a JVM-wide cached instance;
        // closing it here matches the original code, but confirm no other
        // component shares the instance.
        try (FileSystem fs = this.init()) {
            // ".ok" marker file destination
            String okFileHdfsUrl = this.hdfsPath + File.separator + okFile.getName();
            Path okFileHdfsPath = new Path(okFileHdfsUrl);
            deleteIfExists(fs, okFileHdfsPath);

            // data file (name not ending in ".ok") destination
            String fileHdfsUrl = this.hdfsPath + File.separator + sourceFile.getName();
            Path fileHdfsPath = new Path(fileHdfsUrl);
            deleteIfExists(fs, fileHdfsPath);

            // Upload data file first, marker last (see Javadoc for ordering rationale).
            fs.copyFromLocalFile(true, new Path(sourceFile.getAbsolutePath()), fileHdfsPath);
            fs.copyFromLocalFile(true, new Path(okFile.getAbsolutePath()), okFileHdfsPath);
        } catch (Exception e) {
            logger.error("uploadFileToHadoop异常", e);
        }
    }

    /**
     * Deletes {@code path} from HDFS if it exists.
     * Uses the non-deprecated {@code delete(Path, boolean)} overload;
     * {@code recursive=true} matches the behavior of the deprecated
     * single-argument {@code delete(Path)} the code previously relied on.
     */
    private static void deleteIfExists(FileSystem fs, Path path) throws IOException {
        if (fs.exists(path)) {
            fs.delete(path, true);
        }
    }

    @Override
    public void getFileFromHadoop() {
        // Not yet implemented.
    }

    /**
     * Opens a {@link FileSystem} from the default configuration
     * (loads {@code core-site.xml} from the classpath).
     *
     * @return a FileSystem handle; the caller is responsible for closing it
     * @throws IOException if the file system cannot be created
     */
    private FileSystem init() throws IOException {
        // Hadoop authorizes operations by user name; the target cluster
        // is started as "root", so act as that user.
        System.setProperty("HADOOP_USER_NAME", "root");
        Configuration configuration = new Configuration();
        return FileSystem.get(configuration);
    }
}
