package cn.cloud.mqtt2redis.service;

import cn.cloud.mqtt2redis.configuration.hdfs.HdfsConfig;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

@Service
public class HdfsService {
    // Log against this class, not HdfsConfig (original copy-paste bug).
    private static final Logger logger = LoggerFactory.getLogger(HdfsService.class);

    @Autowired
    private FileSystem hdfsClient;
    @Value("${hdfs.url}") String url;
    @Value("${hdfs.timeout}") String timeout;
    @Value("${hdfs.path}") String path;

    /**
     * Uploads the given string content as a file to HDFS at
     * {@code path + machineID + "/" + gfile_name}, replacing any existing file.
     *
     * <p>Synchronized because uploads share the single injected {@link FileSystem}
     * client. NOTE(review): the original closed {@code hdfsClient} in a finally
     * block — that destroys the shared Spring singleton after the first call and
     * breaks all subsequent uploads, so it has been removed; the bean owner is
     * responsible for closing the FileSystem at shutdown.
     *
     * @param machineID   machine identifier used as the target directory name
     * @param gfile_name  target file name within the machine's directory
     * @param gfile_value file content, written as UTF-8 bytes
     * @throws IOException if the HDFS delete, create, or write fails
     */
    public synchronized void gfile_upload(String machineID, String gfile_name, String gfile_value) throws IOException {
        logger.info("{}:{}:Upload start...", machineID, gfile_name);
        Path hdfsPath = new Path(path + machineID + "/" + gfile_name);
        // Remove any previous version before writing the replacement.
        if (hdfsClient.exists(hdfsPath)) {
            hdfsClient.delete(hdfsPath, true);
        }
        // try-with-resources guarantees the output stream is closed even if the
        // write fails (the original leaked it on error).
        try (FSDataOutputStream hdfsOutStream = hdfsClient.create(hdfsPath)) {
            byte[] payload = gfile_value.getBytes(StandardCharsets.UTF_8);
            hdfsOutStream.write(payload);
        } catch (IOException e) {
            logger.error("{}:{}:Upload failed", machineID, gfile_name, e);
            // Propagate instead of silently swallowing: the method declares
            // IOException, and callers must be able to detect a failed upload.
            throw e;
        }
        logger.info("{}:{}:Upload completed!", machineID, gfile_name);
    }
}
