package com.htiiot.base;


import com.htiiot.config.Configs;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.log4j.Logger;

import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.text.SimpleDateFormat;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.Date;
import java.util.TimerTask;

/**
 * 该类用于定时上传日志文件至hdfs
 */
public class UploadFile extends TimerTask {
    private static final Logger log = Logger.getLogger(UploadFile.class);

    // DateTimeFormatter is immutable and thread-safe, so a single shared
    // instance is safe (unlike SimpleDateFormat, which is not thread-safe).
    private static final DateTimeFormatter DAY_FORMAT =
            DateTimeFormatter.ofPattern("yyyy-MM-dd");

    /** Local root directory containing per-day sub-directories of log files. */
    private final String rootDir;

    /**
     * Creates a task that uploads logs found under {@code rootDir}.
     *
     * @param rootDir local root directory; yesterday's log is expected at
     *                {@code rootDir/yyyy-MM-dd/log.txt}
     */
    public UploadFile(String rootDir) {
        this.rootDir = rootDir;
    }

    /**
     * Uploads yesterday's local log file to HDFS, if it exists.
     *
     * <p>The local file {@code rootDir/yyyy-MM-dd/log.txt} (dated yesterday) is
     * moved to {@code HDFS_ROOT_PATH/yyyy-MM-dd} on HDFS; the target directory
     * is created if absent. All exceptions are caught and logged: a
     * {@link TimerTask} that throws would cancel the owning
     * {@link java.util.Timer} and stop all future runs.
     */
    @Override
    public void run() {
        // Yesterday's date names both the local sub-directory and the HDFS target.
        String subDir = LocalDate.now().minusDays(1).format(DAY_FORMAT);
        String filePath = rootDir + "/" + subDir + "/log.txt";
        if (!new File(filePath).exists()) {
            return; // nothing to upload for yesterday
        }
        Path hdfsDir = new Path(Configs.HdfsConfig.HDFS_ROOT_PATH + "/" + subDir);
        log.info("uploading file:" + filePath + " to hdfs:" + hdfsDir);
        Configuration conf = new Configuration();
        // try-with-resources guarantees the FileSystem handle is released even
        // if the upload fails part-way, replacing the manual finally block.
        // NOTE(review): FileSystem.get may return a JVM-cached instance; the
        // original code also closed it, so behavior is unchanged — confirm no
        // other component shares this FileSystem.
        try (FileSystem fs = FileSystem.get(new URI(Configs.HdfsConfig.HDFS_PATH), conf)) {
            if (!fs.exists(hdfsDir)) {
                fs.mkdirs(hdfsDir);
            }
            // moveFromLocalFile removes the local copy after a successful upload.
            fs.moveFromLocalFile(new Path(filePath), hdfsDir);
            log.info("Success upload to HDFS");
        } catch (Exception e) {
            // Broad catch is deliberate: any failure must be logged, never thrown.
            log.error(e, e);
        }
    }

}
