package com.bridgeintelligent.tag.bulkload.service.looklike;

import lombok.extern.slf4j.Slf4j;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

import java.io.FileInputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.security.PrivilegedAction;
import java.util.List;

/**
 * Hdfs操作工具类
 *
 * @author shenjian@bridgeintelligent.com
 * @date 2020/7/27 15:58
 */
@Component
@Slf4j
public class HdfsUtil {

    private static String configFileDir;
    private static String user;
    private static String keytabFilePath;
    private static String krb5;

    @Value( "${hdfs.config-dir:/data/byxfusr/hdfsconf}" )
    public void setConfigFileDir( String configFileDir ) {
        this.configFileDir = configFileDir;
    }

    @Value( "${kerberos.user:cpmuser@BYDP.COM}" )
    public void setUser( String user ) {
        this.user = user;
    }

    @Value( "${kerberos.keytabFilePath:/data/byxfusr/kerberos/cpmuser.keytab}" )
    public void setKeytabFilePath( String keytabFilePath ) {
        this.keytabFilePath = keytabFilePath;
    }

    @Value( "${kerberos.krb5:/data/byxfusr/kerberos/krb5.conf}" )
    public void setKrb5( String krb5 ) {
        this.krb5 = krb5;
    }

    public static FileSystem getFileSystem() throws Exception {
        log.info( "\n\n======>\n" +
                          "          dir: {}\n" +
                          "         krb5: {}\n" +
                          "         user: {}\n" +
                          "       keytab: {}\n",
                  configFileDir, krb5, user, keytabFilePath );
        Configuration config = new Configuration();
        config.addResource( new Path( configFileDir, "core-site.xml" ) );
        config.addResource( new Path( configFileDir, "hdfs-site.xml" ) );
        config.set( "fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem" );
        config.set( "dfs.client.use.datanode.hostname", "true" );
        config.set( "hadoop.security.authentication", "kerberos" );
        System.setProperty( "java.security.krb5.conf", krb5 );
        UserGroupInformation.setConfiguration( config );
        try {
            UserGroupInformation.loginUserFromKeytab( user, keytabFilePath );
        }catch ( IOException e ) {
            log.error( "初始化 kerberos 连接发生异常",e );
        }
        return UserGroupInformation.getCurrentUser().doAs( (PrivilegedAction<FileSystem>) () -> {
            try {return FileSystem.get( config );}
            catch ( IOException e ) {throw new RuntimeException( e );}
        } );
    }

    /**
     * 创建文件夹
     */
    public static void mkdir( FileSystem fileSystem, String dir ) throws IOException {
        Path path = new Path( dir );
        if ( !fileSystem.exists( path ) ) {
            fileSystem.mkdirs( path );
        }
        log.info( "创建成功！" );
    }

    public static void uploadFile(FileSystem fileSystem,String localPath,String remotePath) throws IOException {
        fileSystem.copyFromLocalFile(new Path(localPath),new Path(remotePath));
    }
    /**
     * 上传文件
     */
    public static void putData( FileSystem fileSystem, String uploadFilePath, String fileName ) throws IOException {
        final FSDataOutputStream out = fileSystem.create( new Path( fileName ) );
        final FileInputStream in = new FileInputStream( uploadFilePath );
        IOUtils.copyBytes( in, out, 1024, true );
        log.info( "上传成功！" );
    }
    public static void writeDate(FileSystem fileSystem, List<String> list, String fileName ) throws IOException{
        log.info("==============================hdfs开始写文件："+fileName+"数量为:"+list.size());
        int i =0;
        StringBuffer sb = new StringBuffer();
        try (FSDataOutputStream out = fileSystem.create( new Path( fileName ) )){
            for (String s : list) {
                i++;
                sb.append(s);
                sb.append("\n");
                if(i == 5000){
                    out.writeBytes(sb.toString());
                    sb.setLength(0);
                }
            }
            out.writeBytes(sb.toString());
        } catch (IOException e) {
            log.error("hdfs写文件"+fileName+"失败",e);
            throw e;
        }
    }
    /**
     * 下载文件
     */
    public static void getData( FileSystem fileSystem, String downloadPath, String outPath ) throws IOException {
        final FSDataInputStream in = fileSystem.open( new Path( downloadPath ) );
        IOUtils.copyBytes( in, System.out, 1024, true );
        log.info( "下载文件！" );
    }

    /**
     * 浏览文件夹
     */
    public static void list( FileSystem fileSystem ) throws IOException {
        final FileStatus[] listStatus = fileSystem.listStatus( new Path( "/" ) );
        for ( FileStatus fileStatus : listStatus ) {
            String isDir = fileStatus.isDirectory() ? "文件夹" : "文件";
            final String permission = fileStatus.getPermission().toString();
            final short replication = fileStatus.getReplication();
            final long len = fileStatus.getLen();
            final String path = fileStatus.getPath().toString();
            System.out.println( isDir + "\t" + permission + "\t" + replication + "\t" + len + "\t" + path );
        }
        log.info( "浏览文件夹！" );
    }

    /**
     * 删除文件夹
     */
    public static void remove( FileSystem fileSystem, String dir ) throws IOException {
        fileSystem.delete( new Path( dir ), true );
        System.out.println( "删除文件夹！" );
    }

}
