package cas.ihep.hadoop.util;

import cas.ihep.fs.UnifiedFileSystem;
import cas.ihep.util.MutableInteger;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.util.StringUtils;
import org.luaj.vm2.LuaTable;
import org.luaj.vm2.LuaValue;

import javax.script.Bindings;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;

/**
 * Static helpers shared by the YBJ Hadoop job launcher: resolving user-supplied
 * input paths (local, {@code /hdfs/}-mapped, {@code hdfs://} or {@code alluxio://})
 * into fully-qualified URIs, expanding directories recursively, and creating
 * output directories.
 *
 * <p>All members are static; the class is not instantiable.
 */
public class Utils{

    /** Bindings/configuration key for the job's global value. */
    public static final String GlobalValueKey="ybj.global.value";
    /** Configuration key for the run file location on HDFS. */
    public static final String RunfileKey="ybj.runfile.hdfs";
    /** Configuration key selecting whether the run file uses a tmp copy. */
    public static final String RunfileUsetmp="ybj.runfile.tmp";
    /** Configuration key for the temporary input directory. */
    public static final String TmpInputDirKey="ybj.hadoop.inputdir";
    /** Prefix for global-value scratch files. */
    public static final String GlobalValuePrefix="/tmp/globalvalue-";

    private static final Log LOG = LogFactory.getLog(Utils.class);

    /** Utility class — prevent instantiation. */
    private Utils() {
    }

    /**
     * Recursively collects every regular file under {@code dir} (filtered by
     * {@code filter}, which Hadoop applies to each directory listing) into {@code ps}.
     *
     * @param dir    directory to expand
     * @param ps     output list receiving file paths
     * @param fs     file system used for listing
     * @param filter listing filter, may be null for "accept all"
     * @throws IOException if a listing fails
     */
    private static void resolveDirectory(Path dir, List<Path> ps, FileSystem fs, PathFilter filter) throws IOException {
        // listStatus may return null on some FileSystem implementations; guard it.
        FileStatus[] files = fs.listStatus(dir, filter);
        if (files != null) {
            for (FileStatus f : files) {
                if (f.isDirectory()) {
                    resolveDirectory(f.getPath(), ps, fs, filter);
                } else {
                    ps.add(f.getPath());
                }
            }
        }
    }

    /**
     * Resolves a user-supplied path string and appends the matching file paths to {@code ps}.
     * Accepts {@code hdfs://} / {@code alluxio://} URIs verbatim; a local path beginning
     * with {@code /hdfs/} (after canonicalization) is rewritten onto {@code defaultFS};
     * anything else is treated as a plain local path. Directories are expanded recursively.
     *
     * @param path      raw path string from the user
     * @param ps        output list receiving resolved file paths
     * @param fs        file system used for type checks and listings
     * @param filter    optional filter applied to files (null accepts everything)
     * @param defaultFS default file-system URI prefix (e.g. {@code hdfs://namenode:9000})
     * @throws IOException if canonicalization or listing fails
     */
    public static void addInputPaths(String path, List<Path> ps, FileSystem fs, PathFilter filter, String defaultFS) throws IOException {
        Path p;
        // Keep scheme handling consistent with string2Path: both hdfs:// and
        // alluxio:// URIs must be passed through untouched, not canonicalized
        // as local files.
        if (path.startsWith("hdfs://") || path.startsWith("alluxio://")) {
            p = new Path(path);
        } else {
            path = new File(path).getCanonicalPath();
            if (path.startsWith("/hdfs/")) {
                // Strip the "/hdfs" mount prefix and re-root on the default FS.
                p = new Path(defaultFS + path.substring(5));
            } else {
                p = new Path(path);
            }
        }
        if (fs.isDirectory(p)) {
            resolveDirectory(p, ps, fs, filter);
        } else {
            // Single file: the filter is applied here explicitly because
            // listStatus-based filtering never saw it.
            if (filter == null || filter.accept(p)) {
                ps.add(p);
            }
        }
    }

    /**
     * Qualifies {@code path} against {@code fs}, records its URI in {@code results},
     * and appends its escaped string form plus a trailing comma to {@code pathBuilder}
     * (the caller strips the final comma).
     */
    private static void processPath(StringBuilder pathBuilder, Path path, FileSystem fs, List<URI> results) {
        path = fs.makeQualified(path);
        // Escape commas etc. so the path survives the comma-joined inputdir property.
        String pathStr = StringUtils.escapeString(path.toString());
        LOG.info("Adding input file: " + pathStr);
        results.add(path.toUri());
        pathBuilder.append(pathStr).append(',');
    }

    /**
     * Recursively walks {@code dir}, feeding every regular file through
     * {@link #processPath}. When {@code filter} is null the unfiltered
     * listing overload is used.
     *
     * @throws IOException if a listing fails
     */
    private static void resolveDirectory(Path dir, StringBuilder pathBuilder, FileSystem hdfs, PathFilter filter, List<URI> results) throws IOException {
        FileStatus[] subfiles = filter == null ? hdfs.listStatus(dir) : hdfs.listStatus(dir, filter);
        if (subfiles != null) {
            for (FileStatus fs : subfiles) {
                if (fs.isDirectory()) {
                    resolveDirectory(fs.getPath(), pathBuilder, hdfs, filter, results);
                } else {
                    processPath(pathBuilder, fs.getPath(), hdfs, results);
                }
            }
        }
    }

    /**
     * Converts a user path string to a URI using the same rules as
     * {@link #addInputPaths(String, List, FileSystem, PathFilter, String)}:
     * {@code hdfs://} / {@code alluxio://} pass through; canonicalized local
     * paths under {@code /hdfs/} are re-rooted on {@code defaultFS}; everything
     * else becomes a {@code file://} URI.
     *
     * @throws IOException        if canonicalization fails
     * @throws URISyntaxException if the resulting string is not a valid URI
     */
    private static URI string2Path(String pathStr, String defaultFS) throws IOException, URISyntaxException {
        if (pathStr.startsWith("hdfs://") || pathStr.startsWith("alluxio://")) {
            return new URI(pathStr);
        } else {
            pathStr = new File(pathStr).getCanonicalPath();
            if (pathStr.startsWith("/hdfs/")) {
                return new URI(defaultFS + pathStr.substring(5));
            } else {
                return new URI("file://" + pathStr);
            }
        }
    }

    /**
     * Resolves the Lua-provided {@code Hadoop_InputDir} table into concrete input
     * files, optionally filtered by the {@code Hadoop_InputRegExp} regular expression
     * (matched against file names), and publishes the comma-joined list under
     * {@code mapreduce.input.fileinputformat.inputdir} in {@code hdpConf}.
     * Both bindings are consumed (removed) from {@code bd}.
     *
     * @return the resolved input files as URIs
     * @throws IllegalArgumentException if the {@code Hadoop_InputDir} binding is absent
     * @throws IOException              if path resolution or listing fails
     * @throws URISyntaxException       if a path string is not a valid URI
     */
    public static List<URI> addInputPaths(Configuration hdpConf, Bindings bd, FileSystem hdfs, String defaultFS) throws IOException, URISyntaxException {
        String regexp = (String) bd.remove("Hadoop_InputRegExp");
        PathFilter filter = null;
        if (regexp != null) {
            final Pattern pattern = Pattern.compile(regexp);
            filter = new PathFilter() {
                @Override
                public boolean accept(Path path) {
                    // Match against the file name only, not the full path.
                    return pattern.matcher(path.getName()).matches();
                }
            };
        }
        List<URI> results = new ArrayList<>();
        LuaTable inputPaths = (LuaTable) bd.remove("Hadoop_InputDir");
        if (inputPaths == null) {
            // Fail with a clear message instead of an opaque NullPointerException.
            throw new IllegalArgumentException("Missing required binding: Hadoop_InputDir");
        }
        int sz = inputPaths.keyCount();
        StringBuilder pathBuilder = new StringBuilder();
        for (int i = 0; i < sz; i++) {
            // Lua tables are 1-indexed.
            LuaValue luaValue = inputPaths.get(i + 1);
            if (luaValue.isstring()) {
                Path path = new Path(string2Path(luaValue.tojstring(), defaultFS));
                if (hdfs.isDirectory(path)) {
                    resolveDirectory(path, pathBuilder, hdfs, filter, results);
                } else if (filter == null || filter.accept(path)) {
                    processPath(pathBuilder, path, hdfs, results);
                }
            }
        }
        // Drop the trailing comma appended by processPath, if any path was added.
        sz = pathBuilder.length();
        if (sz > 0) {
            pathBuilder.deleteCharAt(sz - 1);
        }
        hdpConf.set("mapreduce.input.fileinputformat.inputdir", pathBuilder.toString());
        return results;
    }

    /**
     * Resolves the {@code Hadoop_OutputDir} binding to a URI, writes the resolved
     * form back into {@code bd}, and creates the directory (and parents) on {@code hdfs}.
     *
     * @throws IllegalArgumentException if the {@code Hadoop_OutputDir} binding is absent
     * @throws IOException              if resolution or directory creation fails
     * @throws URISyntaxException       if the path string is not a valid URI
     */
    public static void mkdirOutputPath(Bindings bd, String defaultFS, FileSystem hdfs) throws IOException, URISyntaxException {
        String raw = (String) bd.get("Hadoop_OutputDir");
        if (raw == null) {
            throw new IllegalArgumentException("Missing required binding: Hadoop_OutputDir");
        }
        URI output = string2Path(raw, defaultFS);
        String tmp = output.toString();
        LOG.info("output path: " + tmp);
        // Replace the raw binding with the fully-resolved URI string.
        bd.put("Hadoop_OutputDir", tmp);
        hdfs.mkdirs(new Path(output));
    }

    /**
     * Same as {@link #mkdirOutputPath(Bindings, String, FileSystem)} but creates
     * the directory through a {@link UnifiedFileSystem}.
     *
     * @throws IllegalArgumentException if the {@code Hadoop_OutputDir} binding is absent
     * @throws IOException              if resolution or directory creation fails
     * @throws URISyntaxException       if the path string is not a valid URI
     */
    public static void mkdirOutputPath(Bindings bd, String defaultFS, UnifiedFileSystem ufs) throws IOException, URISyntaxException {
        String raw = (String) bd.get("Hadoop_OutputDir");
        if (raw == null) {
            throw new IllegalArgumentException("Missing required binding: Hadoop_OutputDir");
        }
        URI output = string2Path(raw, defaultFS);
        String tmp = output.toString();
        LOG.info("output path: " + tmp);
        // Replace the raw binding with the fully-resolved URI string.
        bd.put("Hadoop_OutputDir", tmp);
        ufs.mkdir(output);
    }
}
