package com.ehualu.impda.hadoop;

import com.ehualu.impda.kafkasecurity.LoginUtilKafkaHuawei;
import com.ehualu.impda.security.LoginUtil;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.http.HttpEntity;
import org.apache.http.NameValuePair;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.util.EntityUtils;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.io.*;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;


public class HdfsExample2 {

    // Commons-logging logger for this class.
    private final static Log LOG = LogFactory.getLog(HdfsExample2.class.getName());

    // Kafka producer handle. NOTE(review): never assigned in this file -- confirm
    // it is initialized elsewhere before use.
    public static KafkaProducer<String, String> producer;

    // Target Kafka topic. NOTE(review): never assigned in this file.
    public static String topic;

    // Whether Kafka sends run asynchronously. NOTE(review): never assigned here.
    public static Boolean isAsync;

    // The fields below hold Kafka producer configuration property KEYS (not values).
    public static String bootstrapServers = "bootstrap.servers";

    public static String clientId = "client.id";

    public static String keySerializer = "key.serializer";

    public static String valueSerializer = "value.serializer";

    public static String securityProtocol = "security.protocol";

    public static String saslKerberosServiceName = "sasl.kerberos.service.name";

    public static String kerberosDomainName = "kerberos.domain.name";

    // Number of messages a Kafka demo would send (unused in the visible code).
    public static int messageNumToSend = 100;

    /**
     * Keytab file name of the machine-machine account requested by the user.
     */
    private static final String USER_KEYTAB_FILE = "user.keytab";

    /**
     * Name of the machine-machine account requested by the user.
     */
    private static final String USER_PRINCIPAL = "super_rj";


    private static final String STORAGE_POLICY_HOT = "HOT";
    // NOTE(review): getResource(...) returns null when a resource is missing from
    // the classpath, making these initializers throw NPE at class-load time --
    // verify hdfs-site.xml / core-site.xml / user.keytab / krb5.conf all ship.
    private static String PATH_TO_HDFS_SITE_XML = HdfsExample2.class.getClassLoader().getResource("hdfs-site.xml")
            .getPath();
    private static String PATH_TO_CORE_SITE_XML = HdfsExample2.class.getClassLoader().getResource("core-site.xml")
            .getPath();

    // Shared Hadoop configuration, populated by confLoad() before any use.
    private static Configuration conf = null;

    // Kerberos principal ("PRNCIPAL" typo kept: renaming a field is out of scope here).
    private static String PRNCIPAL_NAME = "super_rj";
    private static String PATH_TO_KEYTAB = HdfsExample2.class.getClassLoader().getResource("user.keytab").getPath();
    private static String PATH_TO_KRB5_CONF = HdfsExample2.class.getClassLoader().getResource("krb5.conf").getPath();

    //private static String PATH_TO_SMALL_SITE_XML = HdfsExample.class.getClassLoader().getResource("smallfs-site.xml")
    //.getPath();

    private FileSystem fSystem; /* HDFS file system */
    private String DEST_PATH;   /* destination directory in HDFS */
    private String FILE_NAME;   /* file name under DEST_PATH */

    /**
     * Creates an example bound to the given HDFS directory and file name and
     * immediately obtains a FileSystem handle.
     *
     * @param path     destination directory in HDFS
     * @param fileName file name used by the read/write helpers
     * @throws IOException if the FileSystem cannot be obtained
     */
    public HdfsExample2(String path, String fileName) throws IOException {
        DEST_PATH = path;
        FILE_NAME = fileName;
        instanceBuild();
    }

    /**
     * Runs the full demo sequence against DEST_PATH / FILE_NAME:
     * mkdir, write, append, read, delete file, remove directory.
     *
     * @throws IOException on any HDFS operation failure
     */
    public void test() throws IOException {
        mkdir();   // create the destination directory
        write();   // create and write the demo file
        append();  // append to the demo file
        read();    // read the sample path back
        delete();  // delete the demo file
        rmdir();   // remove the destination directory
    }

    /**
     * Builds the HDFS FileSystem instance from the shared configuration.
     *
     * Normally the FileSystem object is unique per JVM and thread-safe, so the
     * same instance can be reused and need not be closed right away.
     * Note: if a thread needs its own long-lived FileSystem instance, create a
     * dedicated one for that thread -- but manage the resource carefully to
     * avoid leaks. Before doing so, first set on the conf:
     *   conf.setBoolean("fs.hdfs.impl.disable.cache", true);
     * which forces a fresh connection instance instead of the cached object.
     */
    private void instanceBuild() throws IOException {
        fSystem = FileSystem.get(conf);
    }

    /**
     * Removes DEST_PATH from HDFS and logs the outcome.
     *
     * @throws IOException on HDFS access failure
     */
    private void rmdir() throws IOException {
        final Path target = new Path(DEST_PATH);
        if (deletePath(target)) {
            LOG.info("success to delete path " + DEST_PATH);
        } else {
            LOG.error("failed to delete destPath " + DEST_PATH);
        }
    }

    /**
     * Prepares Kerberos artifacts for the Kafka client: points the krb5 config
     * and JAAS file at krb5.conf and the keytab under src/main/resources.
     *
     * @throws IOException if the login utility fails
     */
    public static void securityPrepare() throws IOException {
        final String resourceDir = System.getProperty("user.dir") + File.separator + "src"
                + File.separator + "main" + File.separator + "resources" + File.separator;

        // Escape backslashes so Windows-style paths survive property parsing.
        String krbConfPath = (resourceDir + "krb5.conf").replace("\\", "\\\\");
        String keytabPath = (resourceDir + USER_KEYTAB_FILE).replace("\\", "\\\\");

        LoginUtilKafkaHuawei.setKrb5Config(krbConfPath);
        LoginUtilKafkaHuawei.setZookeeperServerPrincipal("zookeeper/hadoop.hadoop.com");
        LoginUtilKafkaHuawei.setJaasFile(USER_PRINCIPAL, keytabPath);
    }

    /**
     * Determines whether the Kafka client runs in security (Kerberos) mode by
     * reading the "kafka.client.security.mode" property from the
     * kafkaSecurityMode file under src/main/resources.
     *
     * @return true when the property equals "yes" (case-insensitive); false when
     *         the file is missing or unreadable
     */
    public static Boolean isSecurityModel() {
        Boolean isSecurity = false;
        String krbFilePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator + "kafkaSecurityMode";

        // file does not exist.
        if (!isFileExists(krbFilePath)) {
            return isSecurity;
        }

        Properties securityProps = new Properties();
        // Bug fix: the original never closed this FileInputStream (resource leak);
        // try-with-resources guarantees the close.
        try (InputStream in = new FileInputStream(krbFilePath)) {
            securityProps.load(in);
            if ("yes".equalsIgnoreCase(securityProps.getProperty("kafka.client.security.mode"))) {
                isSecurity = true;
            }
        } catch (Exception e) {
            // Bug fix: commons-logging has no "{}" placeholders, so the original
            // logged a literal "{}" and dropped the stack trace; pass the
            // throwable as the second argument instead.
            LOG.info("The Exception occurred while reading " + krbFilePath, e);
        }

        return isSecurity;
    }

    /**
     * Checks whether a file (or directory) exists on the local filesystem.
     *
     * @param fileName local path to test
     * @return true when the path exists
     */
    private static boolean isFileExists(String fileName) {
        return new File(fileName).exists();
    }

    /**
     * Creates the DEST_PATH directory in HDFS and logs the outcome.
     *
     * @throws IOException on HDFS access failure
     */
    private void mkdir() throws IOException {
        final Path target = new Path(DEST_PATH);
        if (createPath(target)) {
            LOG.info("success to create path " + DEST_PATH);
        } else {
            LOG.error("failed to create destPath " + DEST_PATH);
        }
    }

    /*
     * NOTE(review): orphaned Javadoc -- the setStoragePolicy(policyName) method it
     * documented (accepting HOT / WARN / COLD / LAZY_PERSIST / ALL_SSD / ONE_SSD)
     * is no longer present; STORAGE_POLICY_HOT above is its only remnant.
     */

    /**
     * Creates DEST_PATH/FILE_NAME in HDFS and writes a fixed demo string.
     *
     * @throws IOException on HDFS write failure
     */
    private void write() throws IOException {
        final byte[] payload = "hi, I am bigdata. It is successful if you can see me.".getBytes();
        FSDataOutputStream stream = null;
        try {
            stream = fSystem.create(new Path(DEST_PATH + File.separator + FILE_NAME));
            stream.write(payload);
            stream.hsync(); // flush through to the datanodes
            LOG.info("success to write.");
        } finally {
            // Null-safe close that swallows close-time errors.
            IOUtils.closeStream(stream);
        }
    }


    /**
     * Creates DEST_PATH/FILE_NAME in HDFS and writes the given string.
     *
     * @param js content to write
     * @throws IOException on HDFS write failure
     */
    private void writejs(String js) throws IOException {
        FSDataOutputStream stream = null;
        try {
            stream = fSystem.create(new Path(DEST_PATH + File.separator + FILE_NAME));
            stream.write(js.getBytes());
            stream.hsync(); // flush through to the datanodes
            LOG.info("success to write.");
        } finally {
            // Null-safe close that swallows close-time errors.
            IOUtils.closeStream(stream);
        }
    }

    /**
     * Creates an empty file at the given path.
     *
     * @param path file to create
     * @throws IOException on HDFS failure
     */
    private void createpath2(Path path) throws IOException {
        // Bug fix: fSystem.create() returns an open output stream; the original
        // dropped it without closing, leaking the stream and its write lease.
        FSDataOutputStream out = fSystem.create(path);
        IOUtils.closeStream(out);
    }

    /**
     * Ensures the given directory exists, creating it when absent.
     *
     * @param filePath directory to ensure
     * @return always true (the mkdirs result itself is not checked)
     * @throws IOException on HDFS failure
     */
    private boolean createPath3(final Path filePath) throws IOException {
        boolean present = fSystem.exists(filePath);
        if (!present) {
            fSystem.mkdirs(filePath);
        }
        return true;
    }

    /**
     * Appends a fixed demo string to DEST_PATH/FILE_NAME.
     *
     * @throws IOException on HDFS append failure
     */
    private void append() throws IOException {
        FSDataOutputStream stream = null;
        try {
            stream = fSystem.append(new Path(DEST_PATH + File.separator + FILE_NAME));
            stream.write("I append this content.".getBytes());
            stream.hsync(); // flush through to the datanodes
            LOG.info("success to append.");
        } finally {
            // Null-safe close that swallows close-time errors.
            IOUtils.closeStream(stream);
        }
    }

    /**
     * Appends the given string to the file at {@code path}.
     *
     * @param js   content to append
     * @param path target file (must already exist)
     * @throws IOException on HDFS append failure
     */
    private void appendjs(String js, Path path) throws IOException {
        FSDataOutputStream stream = null;
        try {
            stream = fSystem.append(path);
            stream.write(js.getBytes());
            stream.hsync(); // flush through to the datanodes
            LOG.info("success to append.");
        } finally {
            // Null-safe close that swallows close-time errors.
            IOUtils.closeStream(stream);
        }
    }

    /**
     * Reads the file at the hard-coded path /user/hdfs-examples/hht/gcxx line
     * by line, echoing each line to stdout and logging the concatenated result.
     * NOTE(review): this ignores DEST_PATH/FILE_NAME -- confirm the fixed path
     * is intentional.
     *
     * @throws IOException on HDFS read failure
     */
    private void read() throws IOException {
        String strPath = "/user/hdfs-examples/hht/gcxx";
        System.out.println(strPath);
        Path path = new Path(strPath);
        FSDataInputStream in = null;
        BufferedReader reader = null;
        // StringBuilder instead of StringBuffer: single-threaded use, no need
        // for StringBuffer's synchronization.
        StringBuilder strBuffer = new StringBuilder();

        try {
            in = fSystem.open(path);
            reader = new BufferedReader(new InputStreamReader(in));
            String sTempOneLine;

            // readLine() strips line terminators, so the buffer holds the lines
            // joined without separators (matches the original behavior).
            while ((sTempOneLine = reader.readLine()) != null) {
                System.out.println(sTempOneLine);
                strBuffer.append(sTempOneLine);
            }

            LOG.info("result is : " + strBuffer.toString());
            LOG.info("success to read.");

        } finally {
            // make sure the streams are closed finally.
            IOUtils.closeStream(reader);
            IOUtils.closeStream(in);
        }
    }

    /**
     * Recursively deletes DEST_PATH/FILE_NAME and logs the outcome.
     *
     * @throws IOException on HDFS access failure
     */
    private void delete() throws IOException {
        final String fullName = DEST_PATH + File.separator + FILE_NAME;
        if (fSystem.delete(new Path(fullName), true)) {
            LOG.info("success to delete the file " + fullName);
        } else {
            LOG.warn("failed to delete the file " + fullName);
        }
    }

    /**
     * Ensures filePath exists as a directory, creating it when missing.
     *
     * @param filePath directory to ensure
     * @return always true (the mkdirs result itself is not checked)
     * @throws IOException on HDFS failure
     */
    private boolean createPath(final Path filePath) throws IOException {
        boolean alreadyThere = fSystem.exists(filePath);
        if (!alreadyThere) {
            fSystem.mkdirs(filePath);
        }
        return true;
    }

    /**
     * Recursively deletes filePath.
     *
     * @param filePath path to delete
     * @return false when the path does not exist; otherwise the delete result
     * @throws IOException on HDFS failure
     */
    private boolean deletePath(final Path filePath) throws IOException {
        if (fSystem.exists(filePath)) {
            return fSystem.delete(filePath, true);
        }
        return false;
    }

    /**
     * Returns whether the given path exists in HDFS.
     *
     * @param filePath path to test
     * @return true when the path exists
     * @throws IOException on HDFS failure
     */
    private boolean hdfsfileexi(final Path filePath) throws IOException {
        return fSystem.exists(filePath);
    }

    /**
     * Returns whether the given path exists in HDFS (duplicate of
     * {@link #hdfsfileexi}, kept for interface compatibility).
     *
     * @param path path to test
     * @return true when the path exists
     * @throws IOException on HDFS failure
     */
    private boolean hdfsfileexi2(Path path) throws IOException {
        return fSystem.exists(path);
    }


    //
//    private void appendjs(String js, Path path) throws IOException {
//        final String content = js;
//        FSDataOutputStream out = null;
//        try {
//            out = fSystem.append(path);
//            out.write(content.getBytes());
//            out.hsync();
//            LOG.info("success to append.");
//        } finally {
//            // make sure the stream is closed finally.
//            IOUtils.closeStream(out);
//        }
//    }
//
    /**
     * Writes the given string to DEST_PATH/FILE_NAME: appends when the file
     * already exists, otherwise creates it.
     *
     * @param js content to write
     * @throws IOException on HDFS failure
     */
    private void writeHDFS(String js) throws IOException {
        final Path target = new Path(DEST_PATH + File.separator + FILE_NAME);
        FSDataOutputStream stream = null;
        try {
            if (fSystem.exists(target)) {
                System.out.println("文件已经存在");
                stream = fSystem.append(target);
                stream.write(js.getBytes());
                stream.hsync();
                LOG.info("append to write.");
            } else {
                System.out.println("文件不存在");
                stream = fSystem.create(target);
                stream.write(js.getBytes());
                stream.hsync();
                LOG.info("success to write.");
            }
        } finally {
            // Null-safe close that swallows close-time errors.
            IOUtils.closeStream(stream);
        }
    }

    /**
     * Writes the given string to {@code path}: appends when it exists, creates
     * otherwise.
     *
     * Bug fix: the original reassigned the {@code out} parameter and left the
     * close commented out. Java passes references by value, so the caller never
     * sees the stream opened here -- it simply leaked on every call. The stream
     * is now tracked locally and closed in {@code finally}.
     *
     * @param js   content to write
     * @param out  unused legacy parameter (kept for interface compatibility)
     * @param path target file
     * @throws IOException on HDFS failure
     */
    private void writeHDFS2(String js, FSDataOutputStream out, Path path) throws IOException {
        FSDataOutputStream stream = null;
        try {
            if (fSystem.exists(path)) {
                System.out.println("文件已经存在");
                stream = fSystem.append(path);
                stream.write(js.getBytes());
                LOG.info("append to write.");
            } else {
                System.out.println("文件不存在");
                stream = fSystem.create(path);
                stream.write(js.getBytes());
                LOG.info("success to write.");
            }
        } finally {
            LOG.info("***********");
            // Close (flushes buffered bytes); the original leaked this stream.
            IOUtils.closeStream(stream);
        }
    }

    /**
     * Writes every string in {@code list1} as one line to DEST_PATH/FILE_NAME.
     * If the file exists all lines are appended; otherwise it is created with
     * the first element and the remaining elements are appended.
     *
     * Bug fix: the original closed {@code outcre} inside the try block only, so
     * any exception between create() and that close leaked the stream; both
     * streams are now also closed in {@code finally} (closeStream is null-safe).
     *
     * @param list1 lines to write; must be non-empty when the file is absent
     * @throws IOException on HDFS failure
     */
    private void writeHDFS4(List<String> list1) throws IOException {
        FSDataOutputStream outap = null;
        FSDataOutputStream outcre = null;

        Path path = new Path(DEST_PATH + File.separator + FILE_NAME);
        try {
            if (fSystem.exists(path)) {
                System.out.println("文件已经存在");
                outap = fSystem.append(path);
                for (int i = 0; i < list1.size(); i++) {
                    outap.write(list1.get(i).getBytes());
                    outap.write("\n".getBytes());
                    LOG.info("append to write.");
                }
                outap.hsync();
            } else {
                System.out.println("文件不存在");
                // Create with the first line, then reopen for append.
                outcre = fSystem.create(path);
                outcre.write(list1.get(0).getBytes());
                outcre.write("\n".getBytes());
                outcre.hsync();
                IOUtils.closeStream(outcre);
                outcre = null; // already closed; skip the finally-close

                outap = fSystem.append(path);
                for (int i = 1; i < list1.size(); i++) {
                    outap.write(list1.get(i).getBytes());
                    outap.write("\n".getBytes());
                    LOG.info("append to write.");
                }
                outap.hsync();
                LOG.info("success to write.");
            }
        } finally {
            LOG.info("***********");
            // make sure BOTH streams are closed finally.
            IOUtils.closeStream(outcre);
            IOUtils.closeStream(outap);
        }
    }

    /**
     * Writes the given string to {@code path}: appends when it exists, creates
     * otherwise, and hsyncs after writing.
     *
     * Bug fix: like writeHDFS2, the original reassigned the {@code out}
     * parameter (invisible to the caller -- Java passes references by value)
     * and left the close commented out, leaking the stream on every call. The
     * stream is now tracked locally and closed in {@code finally}.
     *
     * @param js   content to write
     * @param out  unused legacy parameter (kept for interface compatibility)
     * @param path target file
     * @throws IOException on HDFS failure
     */
    private void writeHDFS3(String js, FSDataOutputStream out, Path path) throws IOException {
        FSDataOutputStream stream = null;
        try {
            if (fSystem.exists(path)) {
                System.out.println("文件已经存在");
                stream = fSystem.append(path);
                stream.write(js.getBytes());
                stream.hsync();
                LOG.info("append to write.");
            } else {
                System.out.println("文件不存在");
                stream = fSystem.create(path);
                stream.write(js.getBytes());
                stream.hsync();
                LOG.info("success to write.");
            }
        } finally {
            LOG.info("***********");
            // Close the stream; the original leaked it.
            IOUtils.closeStream(stream);
        }
    }




    /**
     * Demo entry point: loads the configuration, performs Kerberos
     * authentication, prepares the Kafka security artifacts, then reads the
     * sample HDFS file.
     *
     * @param args unused
     * @throws Exception on any initialization or HDFS failure
     */
    public static void main(String[] args) throws Exception {
        // Initialization and authentication.
        confLoad();
        authentication();
        securityPrepare();

        String hf = "/test889999.txt";

        // Example 1: an ordinary use case.
        HdfsExample2 hdfs_examples = new HdfsExample2("/user/hdfs-examples", hf);

        List<String> list2 = new ArrayList<String>();
        hdfs_examples.read();
    }

    /**
     * Loads the krb5 location plus hdfs-site.xml / core-site.xml into the
     * shared Configuration. When the application runs on Linux, point these
     * paths at the site files from the Linux client.
     *
     * @throws IOException on configuration load failure
     */
    private static void confLoad() throws IOException {
        System.setProperty("java.security.krb5.conf", PATH_TO_KRB5_CONF);
        conf = new Configuration();
        // Register the cluster site files.
        conf.addResource(new Path(PATH_TO_HDFS_SITE_XML));
        conf.addResource(new Path(PATH_TO_CORE_SITE_XML));
    }

    /**
     * Performs Kerberos login when the cluster runs in security mode. On
     * Linux, krb5.conf and the keytab must be absolute paths, and the keytab
     * and principal must belong to the current user.
     *
     * @throws IOException on login failure
     */
    private static void authentication() throws IOException {
        if (!"kerberos".equalsIgnoreCase(conf.get("hadoop.security.authentication"))) {
            return; // non-secure cluster: nothing to do
        }
        System.setProperty("java.security.krb5.conf", PATH_TO_KRB5_CONF);
        LoginUtil.login(PRNCIPAL_NAME, PATH_TO_KEYTAB, PATH_TO_KRB5_CONF, conf);
    }

}

//class HdfsExampleThread extends Thread {
//	private final static Log LOG = LogFactory.getLog(HdfsExampleThread.class.getName());
//
//	/**
//	 *
//	 * @param threadName
//	 */
//	public HdfsExampleThread(String threadName) {
//		super(threadName);
//	}
//
//	public void run() {
//		HdfsExample example;
//		try {
//			example = new HdfsExample("/user/hdfs-examples/" + getName(), "test.txt");
//			example.test();
//		} catch (IOException e) {
//			LOG.error(e);
//		}
//	}
//}
