package service;

import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

/**
 * Periodically scans every child directory of a base path, zips the first
 * {@code batchFileNumber} files found there, uploads the zip to HDFS and, on
 * success, deletes the original files. Driven by a single-threaded scheduler
 * started from {@link #main(String[])}.
 *
 * Created by luoqifei on 17-1-20.
 */
public class ReadFilesServer {
    private static final Logger logger = LoggerFactory.getLogger(ReadFilesServer.class);
    /** Single-threaded timer that drives the periodic compress/upload cycle. */
    public final static ScheduledExecutorService zipTimer = Executors.newSingleThreadScheduledExecutor();
    /** Parent directory whose child directories are scanned for files to zip. */
    private String basPath;
    /** HDFS authority, e.g. 127.0.0.1:9000 — set from main() args. */
    private static String hdfsUrl = "";
    /** Destination directory inside HDFS. */
    private String hdfsDestory = "/user/huineng1";
    /** Number of files zipped per batch — set from main() args (default 100). */
    private static int batchFileNumber = 100;
    /** Scan interval in seconds — set from main() args (default 10). */
    private static int intervalTimeSeconds = 10;

    /**
     * @param basPath absolute path of the directory whose children are scanned
     */
    public ReadFilesServer(String basPath) {
        this.basPath = basPath;
    }

    /**
     * Runs one compress/upload pass over every child directory of {@code basPath}.
     * For each child holding at least {@code batchFileNumber} files: zips the
     * first batch, uploads it via {@link #uploadToKmx}, deletes the local zip,
     * and deletes the batched originals only when the upload succeeded.
     */
    public void startCompress() {
        File dir = new File(basPath);
        String[] childDirs = dir.list();
        if (childDirs == null || childDirs.length == 0) {
            // FIX: message previously read "there is any file directory"
            logger.error("there is no file or directory under the path : {}", basPath);
            return;
        }
        // zip files by scanning every child directory
        for (String childDir : childDirs) {
            File childDirectory = new File(basPath + File.separator + childDir);
            // FIX: listFiles() is now called exactly once (the old code called it
            // three times — a TOCTOU race) and its result is null-checked:
            // listFiles() returns null for non-directories, and this class writes
            // its .zip output into basPath, so plain files WILL appear here.
            File[] files = childDirectory.listFiles();
            if (files == null) {
                continue;
            }
            // 1. check the dir holds at least one full batch
            if (files.length < batchFileNumber) {
                logger.debug("the directory {} just has {} files. less than we batch number {}. we do nothing in this directory.",
                        childDirectory.getName(), files.length, batchFileNumber);
                continue;
            }
            // 2. collect the batch's absolute paths.
            // FIX: filesName used to be a never-cleared instance field, so every
            // pass re-zipped (already deleted) paths from all earlier batches.
            List<String> filesName = new ArrayList<String>();
            File[] delFiles = new File[batchFileNumber];
            for (int i = 0; i < batchFileNumber; i++) {
                logger.debug("the file : {}", files[i].getAbsolutePath());
                filesName.add(files[i].getAbsolutePath());
                delFiles[i] = files[i];
            }
            // 3. zip the batch; name is made unique by the current millis
            String fName = childDir + "-" + System.currentTimeMillis() + ".zip";
            String zipFileName = basPath + File.separator + fName;
            try {
                ZipCompressByFiles zc = new ZipCompressByFiles(zipFileName);
                zc.Compress(filesName);
            } catch (Exception e) {
                logger.error("error happen when compress file into zip.", e);
                continue;
            }
            // 4. upload the zip to HDFS
            boolean success = false;
            try {
                success = uploadToKmx(zipFileName, fName);
            } catch (Exception e) {
                logger.error("error happen when upload " + zipFileName + " to hdfs.", e);
            }

            if (success) {
                // 4.1 upload succeeded: delete the local zip, then the originals
                try {
                    FileUtils.forceDelete(new File(zipFileName));
                } catch (Exception e) {
                    logger.error("error happen when delete zip file " + zipFileName + " after success upload to KMX.", e);
                }
                for (File successFile : delFiles) {
                    try {
                        FileUtils.forceDelete(successFile);
                    } catch (Exception e) {
                        // keep deleting the rest of the batch on a single failure
                        logger.error("error happen when delete file " + successFile.getName() + " after success upload to KMX.", e);
                    }
                }
            } else {
                // 5. upload failed: keep the originals, only remove the local zip
                logger.error("error happen when upload zip file to Kmx HDFS.");
                try {
                    FileUtils.forceDelete(new File(zipFileName));
                } catch (Exception e) {
                    logger.error("error happen when delete zip file " + zipFileName + " after fail to upload to KMX.", e);
                }
            }
        }
    }

    /**
     * Copies a local zip file to {@code hdfs://hdfsUrl + hdfsDestory}.
     * An existing file of the same name at the destination is removed first.
     *
     * @param zipFile     absolute local path of the zip to upload
     * @param desFileName file name to create under the HDFS destination dir
     * @return true when the copy completed, false on any IOException
     */
    public boolean uploadToKmx(String zipFile, String desFileName) {
        try {
            long start = System.currentTimeMillis();
            logger.info("Start to upload {} to hdfs {}/ .", zipFile, hdfsDestory);
            // FIX: HDFS paths always use '/'; File.separator is '\' on Windows
            // and would have produced an invalid destination path.
            Path dstPath = new Path("hdfs://" + hdfsUrl + hdfsDestory + "/" + desFileName);
            Configuration configuration = new Configuration();
            // FileSystem instances are cached by Hadoop; deliberately not closed here.
            FileSystem hdfs = dstPath.getFileSystem(configuration);
            // overwrite semantics: drop any previous upload of the same name
            if (hdfs.exists(dstPath)) {
                hdfs.delete(dstPath, true);
            }
            // delSrc=false (local zip is cleaned up by the caller), overwrite=true
            hdfs.copyFromLocalFile(false, true, new Path(zipFile), dstPath);
            logger.info("upload-{}-costs:{} seconds.", zipFile, (System.currentTimeMillis() - start) / 1000);
            return true;
        } catch (IOException e) {
            logger.error("error happen when upload file to hdfs.", e);
            return false;
        }
    }

    /**
     * Entry point. Expects exactly four args:
     * basePath, hdfsUrl, batchFileNumber, intervalTimeSeconds.
     * Schedules {@link #startCompress()} at a fixed rate and never shuts the
     * scheduler down (the process is intended to run indefinitely).
     */
    public static void main(String[] args) {
        if (args.length != 4) {
            System.err.println("para contains: basePath, hdfsUrl, batchFileNumber, intervalTimeSeconds.");
            System.exit(1);
        }
        // the log file parent directory's absolute path
        String basePath = args[0];
        final ReadFilesServer readFilesServer = new ReadFilesServer(basePath);
        // the hdfs url (host:port)
        hdfsUrl = args[1];
        // default batch compress file number is 100
        batchFileNumber = Integer.parseInt(args[2]);
        // default compress interval time is 10 seconds
        intervalTimeSeconds = Integer.parseInt(args[3]);

        zipTimer.scheduleAtFixedRate(new Runnable() {
            @Override
            public void run() {
                try {
                    logger.debug("start to compress file time is :  " + new Date());
                    readFilesServer.startCompress();
                } catch (Exception e) {
                    // swallow so one failed pass never kills the schedule
                    logger.error("error happen when exec compress timer.", e);
                }
            }
        }, 0, intervalTimeSeconds, TimeUnit.SECONDS);
    }
}
