package tbdp.tool;

import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import org.apache.log4j.Logger;
import tbdp.tool.common.Config;
import tbdp.tool.common.Constants;
import tbdp.tool.common.Utils;

import java.io.File;
import java.io.FilenameFilter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.*;

/**
 * Created by xbkaishui on 16/10/15.
 * 文件数据格式修复程序,负责遍历文件列表
 */
/**
 * Repairs the data format of raw market data files for one trading date.
 *
 * <p>Workflow: list the date's zip files in a source directory (skipping the
 * midday trading break), submit one {@link CleanWorker} per file to a fixed
 * thread pool, wait for all results, then merge the cleaned output and upload
 * each merged file to HDFS. Failures of individual tasks are logged and
 * skipped (best-effort), not rethrown.
 */
public class Cleaner {

    private static final Logger logger = Logger.getLogger(Cleaner.class);
    private static final Config config = Config.getInstance();

    // Root under which cleaned output is written: <outputBaseDir>/<date>/<marketType>
    private final String outputBaseDir = config.getOutputDir();

    // Worker pool sized from configuration; one CleanWorker task per input file.
    private final ExecutorService workPool = Executors.newFixedThreadPool(config.getThreads());

    /**
     * Cleans Shanghai (SH) market files for the given trading date.
     *
     * @param shDir directory containing the raw SH data files
     * @param date  trading-date prefix used to select files (e.g. "20161015")
     */
    public void cleanSH(String shDir, final String date) {
        clean(shDir, date, Constants.sh);
    }

    /**
     * Cleans all data files for one date and market: select files, clean them
     * in parallel, merge the cleaned pieces, and upload the merged output.
     *
     * @param shDir      source directory holding the raw zip files
     * @param date       trading-date prefix files must start with
     * @param marketType market tag (Constants.sh / Constants.sz) used in output paths
     * @throws IllegalArgumentException if {@code shDir} does not exist or is not a directory
     * @throws IllegalStateException    if the directory listing fails
     */
    public void clean(String shDir, final String date, String marketType) {

        logger.info(String.format("clean dir:\t%s  date:\t%s  marketType:\t%s", shDir, date, marketType));

        File dir = new File(shDir);
        Preconditions.checkArgument(dir.exists() && dir.isDirectory(), dir + " not exists");

        // Select this date's zip files, excluding the midday trading-break window.
        File[] dataFiles = dir.listFiles(new FilenameFilter() {
            @Override
            public boolean accept(File dir, String name) {
                return name.startsWith(date) && name.endsWith(Constants.zipSuffix) && !filterMidDate(date, name);
            }
        });
        // listFiles() returns null on an I/O error; fail fast with a clear
        // message instead of an NPE on dataFiles.length below.
        Preconditions.checkState(dataFiles != null, "failed to list files in " + dir);

        logger.info("data size " + dataFiles.length);
        logger.info("data list " + Arrays.deepToString(dataFiles));

        String baseDir = Joiner.on('/').join(outputBaseDir, date, marketType);
        logger.info("output baseDir " + baseDir);

        List<Future<Boolean>> resultList = new ArrayList<>();

        for (File dataFile : dataFiles) {
            logger.info("submit clean work task " + dataFile);
            Future<Boolean> rs = workPool.submit(new CleanWorker(baseDir, dataFile.getAbsolutePath(), marketType));
            resultList.add(rs);
        }

        logger.info("get results total " + resultList.size());
        int success = 0;
        for (Future<Boolean> rs : resultList) {
            try {
                boolean isSuccess = rs.get();
                Preconditions.checkState(isSuccess, "clean work fail");
                // Count only after the check: a task that returned false must
                // not inflate the success counter logged below.
                success++;
            } catch (Exception e) {
                logger.warn("statis : success " + success + " total " + resultList.size());
                // Best-effort: a single failed file does not abort the run.
                logger.error("get clean task error", e);
            }
        }
        logger.info("clean result done");

        // All per-file cleaning finished; merge the cleaned pieces.
        logger.info("merge clean data ");
        List<String> outputs = Merge.merge(baseDir, null);
        logger.info("merge file done! " + outputs);
        for (String outputFile : outputs) {
            String fileName = new File(outputFile).getName();
            String hdfsFile = Utils.generateFinalOut(marketType, date, fileName);
            Uploader.upload(outputFile, hdfsFile);
            // Local merged file is only a staging copy; remove it on JVM exit.
            Utils.deleteFilesOnExit(new File(outputFile));
        }

        logger.info("upload file done! " + outputs);
    }

    /**
     * Returns {@code true} if the file name falls inside the midday market
     * break and should be skipped.
     *
     * <p>The window is ({@code <date>_1135}, {@code <date>_1259}) exclusive,
     * compared lexicographically against the file name's date/time prefix.
     *
     * @param date trading-date prefix (e.g. "20161015")
     * @param name file name being tested
     * @return true when the file belongs to the midday break window
     */
    public boolean filterMidDate(String date, String name) {
        String begin = String.format("%s_1135", date);
        String end = String.format("%s_1259", date);
        return name.compareTo(begin) > 0 && name.compareTo(end) < 0;
    }

    /**
     * Cleans Shenzhen (SZ) market files for the given trading date.
     *
     * @param szDir directory containing the raw SZ data files
     * @param date  trading-date prefix used to select files
     */
    public void cleanSZ(String szDir, String date) {
        clean(szDir, date, Constants.sz);
    }

    /**
     * Initiates an orderly shutdown of the worker pool; previously submitted
     * tasks still run to completion.
     */
    public void close() {
        workPool.shutdown();
    }

}
