package com.om.common;

import org.apache.http.client.utils.URIBuilder;
import org.apache.spark.SparkConf;
import org.bouncycastle.jcajce.provider.digest.MD2;

import java.io.IOException;
import java.io.InputStream;
import java.io.Serializable;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.charset.StandardCharsets;
import java.util.*;

/**
 * @author zhxia
 * @date 2020/10/14 10:21
 */
public class CommonProperties implements Serializable {
    // volatile: required for the double-checked locking in getSparkConf to
    // publish the instance safely across threads (original was non-volatile
    // and also skipped the inner null re-check).
    public static volatile SparkConf sparkConf;
    public static final String SCHEME;
    public static final String HOST;
    public static final String[] HOSTS;
    public static final String USER;
    public static final String PASSWORD;
    public static final String PORT;
    public String[] VHOSTS;
    public static Properties conf;
    // Pre-built bulk endpoint URI; null if es.port is absent/non-numeric or
    // the URI cannot be built.
    public static URI uri;
    public String esIndexName;
    public static String projectname;
    public static String ip_to_region;
    public static String auth;
    // Raw type kept on purpose: this is a public field and parameterizing it
    // could break existing callers at compile time. Holds the '|'-separated
    // tokens of the "filterpass" property (Strings).
    public static ArrayList filterdata = new ArrayList();

    static {
        conf = new Properties();
        // try-with-resources closes the stream (the original leaked it), and a
        // missing resource no longer NPEs out of Properties.load(null) — the
        // class then initializes with "null"-valued settings as before.
        try (InputStream resourceAsStream =
                     CommonProperties.class.getResourceAsStream("/conf.properties")) {
            if (resourceAsStream != null) {
                conf.load(resourceAsStream);
            } else {
                System.err.println("conf.properties not found on classpath; using empty configuration");
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
        // String.valueOf(...) yields the literal "null" for missing keys —
        // preserved from the original behavior.
        SCHEME = String.valueOf(conf.get("es.scheme"));
        HOSTS = String.valueOf(conf.get("es.host")).split(",");
        USER = String.valueOf(conf.get("es.user"));
        PASSWORD = String.valueOf(conf.get("es.passwd"));
        // First host of the comma-separated es.host list is the primary.
        HOST = HOSTS[0];
        PORT = String.valueOf(conf.get("es.port"));
        projectname = String.valueOf(conf.get("mirror.projectname"));
        ip_to_region = String.valueOf(conf.get("ip_region_table"));
        // HTTP Basic auth header value. Explicit UTF-8 instead of the platform
        // default charset so the header is identical on every machine.
        auth = "Basic " + Base64.getEncoder()
                .encodeToString((USER + ":" + PASSWORD).getBytes(StandardCharsets.UTF_8));
        // "filterpass" is a '|'-separated list; guard against the key being
        // absent (the original NPE'd here and aborted class initialization).
        Object filterpass = conf.get("filterpass");
        if (filterpass != null) {
            filterdata.addAll(Arrays.asList(filterpass.toString().split("\\|")));
        }
        try {
            uri = new URIBuilder().setScheme(SCHEME).setHost(HOST)
                    .setPort(Integer.parseInt(PORT)).setPath("/_bulk").build();
        } catch (URISyntaxException | NumberFormatException e) {
            // es.port missing/non-numeric or malformed URI: leave uri null
            // instead of letting an unchecked exception kill class init.
            e.printStackTrace();
        }

    }

    /**
     * Lazily builds the process-wide {@link SparkConf} using double-checked
     * locking; subsequent calls return the cached instance and ignore args.
     *
     * @param args CLI arguments: args[0] = access key, args[1] = secret key,
     *             args[6] = Spark application name
     * @return the shared SparkConf singleton
     */
    public static SparkConf getSparkConf(String[] args) {
        if (sparkConf == null) {
            synchronized (CommonProperties.class) {
                // Re-check inside the lock — the original omitted this, so two
                // racing threads could each build and overwrite the instance.
                if (sparkConf == null) {
                    String ak = args[0];
                    String sk = args[1];
                    SparkConf sc = new SparkConf().setAppName(args[6]);
                    sc.set("fs.obs.access.key", ak);
                    // NOTE(review): key names look inconsistent — the access key
                    // has no "spark.hadoop." prefix while the secret key does.
                    // Preserved as-is; confirm which spelling OBS actually reads.
                    sc.set("spark.hadoop.fs.secret.key", sk);
                    sparkConf = sc;
                }
            }
        }
        return sparkConf;
    }

    /** @return the per-instance virtual host list (may be null if never set) */
    public String[] getVHOSTS() {
        return VHOSTS;
    }

    /** @param VHOSTS the per-instance virtual host list */
    public void setVHOSTS(String[] VHOSTS) {
        this.VHOSTS = VHOSTS;
    }

    /** @return the Elasticsearch index name for this instance */
    public String getEsIndexName() {
        return esIndexName;
    }

    /** @param esIndexName the Elasticsearch index name for this instance */
    public void setEsIndexName(String esIndexName) {
        this.esIndexName = esIndexName;
    }
}
