package avicit.bdp.dds.server.utils;

import org.apache.hadoop.util.StringInterner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.w3c.dom.Text;

import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.util.ArrayList;
import java.util.List;

/**
 * @author mayi
 * 解析hadoop xml配置文件
 */
/**
 * Parses Hadoop-style XML configuration files
 * ({@code <configuration><property><name/><value/></property>...})
 * into formatted argument strings.
 *
 * @author mayi
 */
public final class ParseHadoopXmlUtils {

    private static final Logger LOG = LoggerFactory.getLogger(ParseHadoopXmlUtils.class);

    /** Non-instantiable utility class. */
    private ParseHadoopXmlUtils() {
    }

    /**
     * Parses a Hadoop XML configuration file and appends one entry per
     * {@code <property>} element to {@code args}, formatted via {@code template}.
     *
     * @param args        destination list for the formatted entries; when {@code null}
     *                    the method returns immediately (results could never reach the caller)
     * @param template    {@link String#format} template with two {@code %s} slots: name, value
     * @param xmlFilePath path of the XML file; the method is a no-op when the file does not exist
     * @throws RuntimeException wrapping any parse or I/O failure
     */
    public static void parseHadoopXml(List<String> args, String template, String xmlFilePath) {
        if (args == null) {
            // The original filled a fresh local list here, which was invisible to the
            // caller; returning early makes the no-op explicit.
            LOG.warn("args list is null; results would be lost, skipping parse of {}", xmlFilePath);
            return;
        }

        File file = new File(xmlFilePath);
        if (!file.exists()) {
            return;
        }

        try {
            DocumentBuilder builder = newSecureBuilderFactory().newDocumentBuilder();
            Document doc;
            // try-with-resources: the original leaked the FileInputStream.
            try (BufferedInputStream in = new BufferedInputStream(new FileInputStream(file))) {
                doc = builder.parse(in);
            }

            Element root = doc.getDocumentElement();
            if (!"configuration".equals(root.getTagName())) {
                LOG.error("bad conf file: top-level element not <configuration>");
                return;
            }

            NodeList props = root.getChildNodes();
            for (int i = 0; i < props.getLength(); i++) {
                Node propNode = props.item(i);
                if (!(propNode instanceof Element)) {
                    continue;
                }
                Element prop = (Element) propNode;
                if ("configuration".equals(prop.getTagName())) {
                    // Nested <configuration> elements are tolerated but not descended into.
                    continue;
                }
                if (!"property".equals(prop.getTagName())) {
                    LOG.warn("bad conf file: element not <property>");
                    continue;
                }
                appendProperty(prop, template, args);
            }
        } catch (Exception e) {
            LOG.error("error parsing conf {}", xmlFilePath, e);
            // Preserve the cause and add context instead of rethrowing bare.
            throw new RuntimeException("error parsing conf " + xmlFilePath, e);
        }
    }

    /**
     * Builds a parser factory hardened against XXE while keeping comment-skipping,
     * namespace awareness and XInclude support from the original configuration.
     */
    private static DocumentBuilderFactory newSecureBuilderFactory() {
        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        //ignore all comments inside the xml file
        factory.setIgnoringComments(true);
        //allow includes in the xml file
        factory.setNamespaceAware(true);
        try {
            factory.setXIncludeAware(true);
        } catch (UnsupportedOperationException e) {
            LOG.error("Failed to set setXIncludeAware(true) for parser {}", factory, e);
        }
        // XXE hardening: Hadoop config files never legitimately need DTDs or
        // external entities, so forbid them outright.
        setFeatureQuietly(factory, "http://apache.org/xml/features/disallow-doctype-decl", true);
        setFeatureQuietly(factory, "http://xml.org/sax/features/external-general-entities", false);
        setFeatureQuietly(factory, "http://xml.org/sax/features/external-parameter-entities", false);
        return factory;
    }

    /** Best-effort feature toggle; not every parser implementation supports every feature. */
    private static void setFeatureQuietly(DocumentBuilderFactory factory, String feature, boolean value) {
        try {
            factory.setFeature(feature, value);
        } catch (Exception e) {
            LOG.warn("Parser does not support feature {}", feature, e);
        }
    }

    /** Extracts the name/value pair from one {@code <property>} element and appends the formatted entry. */
    private static void appendProperty(Element prop, String template, List<String> args) {
        String name = null;
        String value = null;
        NodeList fields = prop.getChildNodes();
        for (int j = 0; j < fields.getLength(); j++) {
            Node fieldNode = fields.item(j);
            if (!(fieldNode instanceof Element)) {
                continue;
            }
            Element field = (Element) fieldNode;
            // getTextContent() also handles CDATA sections; the original's cast of
            // the first child to Text threw ClassCastException on CDATA content.
            if ("name".equals(field.getTagName()) && field.hasChildNodes()) {
                name = StringInterner.weakIntern(field.getTextContent().trim());
            } else if ("value".equals(field.getTagName()) && field.hasChildNodes()) {
                value = StringInterner.weakIntern(field.getTextContent());
            }
        }
        if (name == null || name.isEmpty()) {
            // A property without a name is malformed; the original formatted the
            // literal string "null" into the output here.
            LOG.warn("bad conf file: <property> without <name>, skipped");
            return;
        }
        // A property with no <value> element is treated as the empty string.
        args.add(String.format(template, name, value == null ? "" : value));
    }

    /** Ad-hoc demo entry point: prints the parsed entries of a local hdfs-site.xml. */
    public static void main(String[] args) {
        List<String> list = new ArrayList<>();
        String template = "--conf spark.hadoop.%s=%s";
        parseHadoopXml(list, template, "D:\\work\\hadoopconf\\hdfs-site.xml");
        for (String s : list) {
            System.out.println(s);
        }
    }

}
