package com.qyer.search.storm.util;

import backtype.storm.Config;
import backtype.storm.tuple.*;
import backtype.storm.tuple.Fields;

import com.google.gson.*;
import com.google.gson.annotations.Expose;
import com.google.gson.reflect.TypeToken;

import com.qyer.search.storm.exception.DBException;
import com.qyer.search.storm.exception.ESClientHostConfigException;
import com.qyer.search.storm.exception.MissConfigException;

import java.io.IOException;
import java.io.InputStream;
import java.net.*;
import java.nio.charset.StandardCharsets;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import org.elasticsearch.client.Client;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Created by tangzheng on 16/1/7.
 */
/**
 * Created by tangzheng on 16/1/7.
 *
 * Static helpers shared by the storm topology: loading kafka consumer
 * configuration, constructing spout fields/values from kafka messages, and
 * creating Elasticsearch clients and MySQL connections/statements from the
 * storm configuration map.
 */
public class ConfigUtils {

  public static final Gson DEFAULT_GSON = new Gson();
  public static final int ZERO = 0;
  public static final int INIT_STAGE = 0;
  public static final String SYSTEM_COMPONENT_ID = "_system";
  public static final String DATE_FORMAT = "yyyy-MM-dd HH:mm:ss.SSS";

  public static final String COMMON_SPLIT = ",";
  // NOTE(review): all four stream ids share the same literal value, so bolts
  // cannot distinguish these streams at runtime -- confirm this is intentional.
  public static final String TAGS_STREAM = "Tags&EntityStream";
  public static final String ENTITY_STREAM = "Tags&EntityStream";
  public static final String SPOUT_STREAM = "Tags&EntityStream";
  public static final String ZTAG_STREAM = "Tags&EntityStream";

  public static final String KAFKA_CONFIG_PREFIX = "kafka.";
  public static final String Fields_CONFIG_PREFIX = "spout.";
  public static final String SPOUT_COMMON_PREFIX = "spout.";
  public static final String INDEX_COMMON_PREFIX = "indexBolt.";

  public static final String IN = "IN";
  public static final String OUT = "OUT";
  public static final String PROCESS = "PROCESS";

  /** Continent id to Chinese continent name. Populated once in the static initializer. */
  public static final Map<Integer, String> CONTINENT_MAP2 = new HashMap<>();
  /** Region key to start position. Populated once in the static initializer. */
  public static final Map<String,Integer> START_POS_Map = new HashMap<>();

  /**
   * Storm configuration key pointing to a file containing kafka configuration ({@code "kafka.config"}).
   */
  public static final String CONFIG_FILE = "kafka.config";
  /**
   * Storm configuration key used to determine the kafka topic to read from ({@code "kafka.spout.topic"}).
   */
  public static final String CONFIG_TOPIC = "kafka.spout.topic";
  /**
   * Default kafka topic to read from ({@code "storm"}).
   */
  public static final String DEFAULT_TOPIC = "storm";
  /**
   * Storm configuration key used to determine the failure policy to use ({@code "kafka.spout.consumer.group"}).
   */
  public static final String CONFIG_GROUP = "kafka.spout.consumer.group";
  /**
   * Default kafka consumer group id ({@code "kafka_spout"}).
   */
  public static final String DEFAULT_GROUP = "kafka_spout";
  /**
   * Storm configuration key used to determine the maximum number of message to buffer
   * ({@code "kafka.spout.buffer.size.max"}).
   */
  public static final String CONFIG_BUFFER_MAX_MESSAGES = "kafka.spout.buffer.size.max";
  /**
   * Default maximum buffer size in number of messages ({@code 1024}).
   */
  public static final int DEFAULT_BUFFER_MAX_MESSAGES = 1024;
  private static final Logger LOG = LoggerFactory.getLogger(ConfigUtils.class);
  static {
    START_POS_Map.put("bjtj", 1);
    START_POS_Map.put("shhz", 2);
    START_POS_Map.put("gzsz", 3);
    START_POS_Map.put("cdcq", 4);

    START_POS_Map.put("inland", 5);
    START_POS_Map.put("hmt", 6);
    START_POS_Map.put("abroad", 7);

    CONTINENT_MAP2.put(10, "亚洲");
    CONTINENT_MAP2.put(12, "欧洲");
    CONTINENT_MAP2.put(76, "非洲");
    CONTINENT_MAP2.put(234, "北美");
    CONTINENT_MAP2.put(235, "南美");
    CONTINENT_MAP2.put(239, "大洋洲");
    CONTINENT_MAP2.put(759, "南极洲");
  }


  /**
   * Loads a {@link Properties} configuration from a classpath resource.
   *
   * @param resource Name of the resource on the classpath.
   * @return The properties read from the resource.
   * @throws IllegalArgumentException When the resource is missing or cannot be read.
   */
  public static Properties configFromResource(final String resource) {
    final InputStream input = Thread.currentThread().getContextClassLoader().getResourceAsStream(resource);
    if (input == null) {
      // non-existent resource will *not* throw an exception, do this anyway
      throw new IllegalArgumentException("configuration file '" + resource + "' not found on classpath");
    }

    final Properties config = new Properties();
    // try-with-resources: the original leaked the stream on both success and failure
    try (InputStream in = input) {
      config.load(in);
    }
    catch (final IOException e) {
      throw new IllegalArgumentException("reading configuration from '" + resource + "' failed", e);
    }
    return config;
  }

  /**
   * Builds the kafka consumer configuration from the storm configuration map, either from a
   * separate resource file named by {@link #CONFIG_FILE} or from storm config entries with the
   * {@link #KAFKA_CONFIG_PREFIX} prefix. Ensures {@code zookeeper.connect}, {@code group.id} and
   * {@code auto.commit.enable} are present, then sanity-checks the result.
   *
   * @param config Storm's configuration map.
   * @return A kafka consumer configuration suitable for operation in storm.
   * @throws IllegalArgumentException When no zookeeper connection can be determined or a sanity
   *                                  check fails.
   */
  public static Properties createKafkaConfig(final Map<String, Object> config) {
    final Properties consumerConfig;
    if (config.get(CONFIG_FILE) != null) {
      final String configFile = String.valueOf(config.get(CONFIG_FILE));
      // read values from separate config file
      LOG.info("loading kafka configuration from {}", configFile);
      consumerConfig = configFromResource(configFile);
    }
    else {
      // configuration file not set, read values from storm config with kafka prefix
      LOG.info("reading kafka configuration from storm config using prefix '{}'", KAFKA_CONFIG_PREFIX);
      consumerConfig = configFromPrefix(config, KAFKA_CONFIG_PREFIX);
      LOG.info("KAFKA_CONFIG:{}", consumerConfig);
    }

    // zookeeper connection string is critical, try to make sure it's present
    if (!consumerConfig.containsKey("zookeeper.connect")) {
      final String zookeepers = getStormZookeepers(config);
      if (zookeepers != null) {
        consumerConfig.setProperty("zookeeper.connect", zookeepers);
        LOG.info("no explicit zookeeper configured for kafka, falling back on storm's zookeeper ({})", zookeepers);
      }
      else {
        // consumer will fail to start without zookeeper.connect
        throw new IllegalArgumentException("required kafka configuration key 'zookeeper.connect' not found");
      }
    }

    // group id string is critical, try to make sure it's present
    if (!consumerConfig.containsKey("group.id") || String.valueOf(consumerConfig.get("group.id")).isEmpty()) {
      final Object groupId = config.get(CONFIG_GROUP);
      if (groupId != null && !String.valueOf(groupId).isEmpty()) {
        consumerConfig.setProperty("group.id", String.valueOf(groupId));
      }
      else {
        consumerConfig.setProperty("group.id", DEFAULT_GROUP);
        LOG.info("kafka consumer group id not configured or empty, using default ({})", DEFAULT_GROUP);
      }
    }

    // auto-committing offsets to zookeeper should be disabled, storm manages offsets itself
    if (!consumerConfig.containsKey("auto.commit.enable")) {
      consumerConfig.setProperty("auto.commit.enable", "false");
    }

    // check configuration sanity before returning
    checkConfigSanity(consumerConfig);
    return consumerConfig;
  }

  /**
   * Extracts all entries of {@code base} whose keys start with {@code prefix} into a
   * {@link Properties} object, stripping the prefix from the keys.
   *
   * @param base   The configuration map to read from.
   * @param prefix The key prefix to select and strip.
   * @return The matching entries, prefix removed, values stringified.
   */
  public static Properties configFromPrefix(final Map<String, Object> base, final String prefix) {
    final Properties config = new Properties();
    // load configuration from base, stripping prefix
    for (Map.Entry<String, Object> entry : base.entrySet()) {
      if (entry.getKey().startsWith(prefix)) {
        config.setProperty(entry.getKey().substring(prefix.length()), String.valueOf(entry.getValue()));
      }
    }

    return config;
  }

  /**
   * Derives a zookeeper connection string ({@code host:port,host:port,...}) from storm's own
   * zookeeper configuration.
   *
   * @param stormConfig Storm's configuration map.
   * @return The connection string, or {@code null} if storm's zookeeper configuration is absent
   *         or not of the expected types.
   */
  public static String getStormZookeepers(final Map<String, Object> stormConfig) {
    final Object stormZookeepers = stormConfig.get(Config.STORM_ZOOKEEPER_SERVERS);
    final Object stormZookeepersPort = stormConfig.get(Config.STORM_ZOOKEEPER_PORT);
    if (stormZookeepers instanceof List && stormZookeepersPort instanceof Number) {
      // join the servers and the port together to a single zookeeper connection string for kafka
      final StringBuilder zookeepers = new StringBuilder();
      final int port = ((Number) stormZookeepersPort).intValue();

      for (final Iterator<?> iterator = ((List<?>) stormZookeepers).iterator(); iterator.hasNext(); ) {
        zookeepers.append(String.valueOf(iterator.next()));
        zookeepers.append(':');
        zookeepers.append(port);
        if (iterator.hasNext()) {
          zookeepers.append(',');
        }
      }
      return zookeepers.toString();
    }

    // no valid zookeeper configuration found
    return null;
  }

  /**
   * Reads the maximum message buffer size from storm's configuration, falling back to
   * {@link #DEFAULT_BUFFER_MAX_MESSAGES} when absent or unparsable.
   *
   * @param stormConfig Storm's configuration map.
   * @return The maximum number of messages to buffer.
   */
  public static int getMaxBufSize(final Map<String, Object> stormConfig) {
    final Object value = stormConfig.get(CONFIG_BUFFER_MAX_MESSAGES);
    if (value != null) {
      try {
        return Integer.parseInt(String.valueOf(value).trim());
      }
      catch (final NumberFormatException e) {
        LOG.warn("invalid value for '{}' in storm config ({}); falling back to default ({})", CONFIG_BUFFER_MAX_MESSAGES, value, DEFAULT_BUFFER_MAX_MESSAGES);
      }
    }

    return DEFAULT_BUFFER_MAX_MESSAGES;
  }

  /**
   * Retrieves the topic to be consumed from storm's configuration map, or the {@link #DEFAULT_TOPIC} if no
   * (non-empty) value was found using {@link #CONFIG_TOPIC}.
   *
   * @param stormConfig Storm's configuration map.
   * @return The topic to be consumed.
   */
  public static String getTopic(final Map<String, Object> stormConfig) {
    if (stormConfig.containsKey(CONFIG_TOPIC)) {
      // get configured topic from config as string, removing whitespace from both ends
      final String topic = String.valueOf(stormConfig.get(CONFIG_TOPIC)).trim();
      if (topic.length() > 0) {
        return topic;
      }
      else {
        LOG.warn("configured topic found in storm config is empty, defaulting to topic '{}'", DEFAULT_TOPIC);
        return DEFAULT_TOPIC;
      }
    }
    else {
      LOG.warn("no configured topic found in storm config, defaulting to topic '{}'", DEFAULT_TOPIC);
      return DEFAULT_TOPIC;
    }
  }

  /**
   * Checks the sanity of a kafka consumer configuration for use in storm.
   *
   * @param config The configuration parameters to check.
   * @throws IllegalArgumentException When a sanity check fails.
   */
  public static void checkConfigSanity(final Properties config) {
    // auto-committing offsets should be disabled
    final Object autoCommit = config.getProperty("auto.commit.enable");
    if (autoCommit == null || Boolean.parseBoolean(String.valueOf(autoCommit))) {
      throw new IllegalArgumentException("kafka configuration 'auto.commit.enable' should be set to false for operation in storm");
    }

    // consumer timeout should not block calls indefinitely
    // (NumberFormatException from a non-numeric value is itself an IllegalArgumentException)
    final Object consumerTimeout = config.getProperty("consumer.timeout.ms");
    if (consumerTimeout == null || Integer.parseInt(String.valueOf(consumerTimeout)) < 0) {
      throw new IllegalArgumentException("kafka configuration value for 'consumer.timeout.ms' is not suitable for operation in storm");
    }
  }

  /**
   * Builds the storm {@link Fields} declaration for the spout's output tuples.
   *
   * NOTE(review): {@code configFromPrefix} already strips the {@code "spout."} prefix, yet the
   * lookup key below is {@code "spout.fields"} -- so the effective storm config key is
   * {@code "spout.spout.fields"}. Confirm against the deployed configuration before changing.
   *
   * @param config Storm's configuration map.
   * @return The configured field names, or the default field set.
   */
  public static Fields getSpoutFields(final Map<String, Object> config){
    Properties spoutFieldConfig = configFromPrefix(config, Fields_CONFIG_PREFIX);
    return new Fields(spoutFieldConfig.getProperty("spout.fields","s_index,s_type,s_eid,s_id,s_stage").trim().split(
      ","));
  }

  /**
   * Deserializes a raw kafka message (UTF-8 JSON) into a storm {@link Values} tuple.
   *
   * @param message Raw message bytes, expected to be UTF-8 encoded JSON of a {@code KafkaMessage}.
   * @param count   The message counter to emit as the tuple's id field.
   * @return The tuple values: index, type, entity ids, count, stage.
   */
  public static Values getSpoutValuse(byte[] message, int count){
    // decode explicitly as UTF-8; the platform default charset is not reliable for this payload
    String msg = new String(message, StandardCharsets.UTF_8);
    KafkaMessage kafkaMessage =
      DEFAULT_GSON.fromJson(msg, new TypeToken<KafkaMessage>() {
      }.getType());

    return
      new Values(kafkaMessage.get_index(), kafkaMessage.get_type(), kafkaMessage.get_entityIds(),
                 count, kafkaMessage.get_stage());
  }

  /**
   * Deserializes a JSON kafka message string into a storm {@link Values} tuple, rethrowing any
   * deserialization failure after logging it.
   *
   * @param message  JSON representation of a {@code KafkaMessage}.
   * @param msgCount The message counter to emit as the tuple's id field.
   * @return The tuple values: index, type, entity ids, msgCount, stage.
   * @throws Exception When deserialization fails.
   */
  public static Values getSpoutValuse2(String message, int msgCount)throws Exception{

    KafkaMessage kafkaMessage;
    try {
      kafkaMessage = DEFAULT_GSON.fromJson(message, new TypeToken<KafkaMessage>() {
      }.getType());
    }catch (Exception e){
      // original log statement had a '{}' placeholder with no argument and dropped the cause
      LOG.error("message deserialize failed, msg:{}", message, e);
      throw e;
    }

    return
      new Values(kafkaMessage.get_index(), kafkaMessage.get_type(), kafkaMessage.get_entityIds(),
                 msgCount, kafkaMessage.get_stage());
  }

  /**
   * Reads the spout failure threshold ({@code spout.fail.max}) from storm's configuration.
   *
   * @param config Storm's configuration map.
   * @return The configured threshold, defaulting to 5.
   */
  public static int getFailThreshold(final Map<String, Object> config){
    Properties spoutFailConfig = configFromPrefix(config, SPOUT_COMMON_PREFIX);
    return Integer.parseInt(spoutFailConfig.getProperty("fail.max", "5").trim());
  }

  /**
   * Creates an Elasticsearch transport client from {@code indexBolt.host} and
   * {@code indexBolt.cluster.name} in storm's configuration.
   *
   * @param config Storm's configuration map.
   * @return A connected {@link Client}.
   * @throws ESClientHostConfigException When a host entry cannot be resolved or parsed.
   */
  public static Client createESClient(final Map<String, Object> config) throws Exception{
    Properties ESclientProp = configFromPrefix(config, INDEX_COMMON_PREFIX);
    String[] hosts = ESclientProp.getProperty("host", "esh1:9300,esh2:9300,esh3:9300").trim()
                                 .split(COMMON_SPLIT);
    // Arrays.toString: concatenating the array logged its identity hash, not its contents
    LOG.info("ESClient:{}", Arrays.toString(hosts));
    String clusterName = ESclientProp.getProperty("cluster.name", "helloworld").trim();

    Settings settings = Settings.settingsBuilder()
                                .put("cluster.name", clusterName)
                                .put("client.transport.sniff", true)
                                .build();
    TransportClient client = TransportClient.builder()
                                            .settings(settings)
                                            .build();
    try {
      for (int i = 0; i < hosts.length; i++) {
        // each entry is expected as "host:port"
        String[] address = hosts[i].trim().split(":");
        client.addTransportAddress(new InetSocketTransportAddress(InetAddress.getByName(address[0]),
                                                                  Integer.parseInt(address[1])));
      }
    }catch (Exception e){
      // pass the throwable as the final argument so SLF4J logs the stack trace
      LOG.error("ES client init failed", e);
      throw new ESClientHostConfigException("ES client init failed", e);
    }
    return client;
  }

  /**
   * Opens a MySQL connection using {@code <BOLT_PREFIX>mysql.url/user/password} from storm's
   * configuration.
   *
   * @param config      Storm's configuration map.
   * @param BOLT_PREFIX Configuration key prefix identifying the bolt.
   * @return An open JDBC {@link Connection}; the caller is responsible for closing it.
   * @throws MissConfigException When any of url, user or password is missing or empty.
   * @throws DBException         When the driver fails to load or the connection fails.
   */
  public static Connection getConnection(final Map<String, Object> config, String BOLT_PREFIX)
    throws Exception{
    Properties connectionConfig = configFromPrefix(config, BOLT_PREFIX);
    String mysqlUrl;
    String mysqlUser;
    String mysqlPass;
    if(!connectionConfig.containsKey("mysql.url") || connectionConfig.getProperty("mysql.url").isEmpty()) {
      LOG.error("{}mysql.url missed!",BOLT_PREFIX);
      throw new MissConfigException("mysql.url missed!");
    }
    else
      mysqlUrl = connectionConfig.getProperty("mysql.url").trim();
    if(!connectionConfig.containsKey("mysql.user") || connectionConfig.getProperty("mysql.user").isEmpty()) {
      LOG.error("{}mysql.user missed!",BOLT_PREFIX);
      throw new MissConfigException("mysql.user missed!");
    }
    else
      mysqlUser = connectionConfig.getProperty("mysql.user").trim();
    if(!connectionConfig.containsKey("mysql.password") || connectionConfig.getProperty("mysql.password").isEmpty()) {
      LOG.error("{}mysql.password missed!",BOLT_PREFIX);
      throw new MissConfigException("mysql.password missed");
    }
    else
      mysqlPass = connectionConfig.getProperty("mysql.password").trim();
    try {
      // legacy driver class name kept for compatibility with the deployed mysql connector
      Class.forName("com.mysql.jdbc.Driver");
      return DriverManager.getConnection(mysqlUrl, mysqlUser, mysqlPass);
    }catch (Exception e){
      LOG.error("{}db connection failed", BOLT_PREFIX, e);
      throw new DBException(BOLT_PREFIX + "db connection failed", e);
    }

  }

  /**
   * Prepares a statement from the SQL configured under {@code <BOLT_PREFIX><SQL_KEY>}.
   *
   * @param config      Storm's configuration map.
   * @param BOLT_PREFIX Configuration key prefix identifying the bolt.
   * @param conn        An open connection to prepare the statement on.
   * @param SQL_KEY     Configuration key (after prefix stripping) holding the SQL text.
   * @return The prepared statement.
   * @throws MissConfigException When the SQL key is missing or empty.
   */
  public static PreparedStatement getPrepareStmt(final Map<String, Object> config,
                                                 String BOLT_PREFIX, Connection conn, String SQL_KEY)
    throws Exception {
    Properties connectionConfig = configFromPrefix(config, BOLT_PREFIX);
    String sql;
    if(!connectionConfig.containsKey(SQL_KEY) || connectionConfig.getProperty(SQL_KEY).isEmpty()){
      // name the actual missing key; the original logged a fixed "mysql missed!"
      LOG.error("{}{} missed!", BOLT_PREFIX, SQL_KEY);
      throw new MissConfigException(SQL_KEY + " missed");
    }else {
      sql = connectionConfig.getProperty(SQL_KEY).trim();
      LOG.info("test.sql:{}", sql);
    }
    return conn.prepareStatement(sql);
  }

  /**
   * Prepares a statement from the SQL configured under {@code <BOLT_PREFIX>mysql.sql}.
   *
   * @param config      Storm's configuration map.
   * @param BOLT_PREFIX Configuration key prefix identifying the bolt.
   * @param conn        An open connection to prepare the statement on.
   * @return The prepared statement.
   * @throws MissConfigException When {@code mysql.sql} is missing or empty.
   */
  public static PreparedStatement getPrepareStmt(final Map<String, Object> config,
                                                 String BOLT_PREFIX, Connection conn)
    throws Exception {
    Properties connectionConfig = configFromPrefix(config, BOLT_PREFIX);
    String sql;
    if(!connectionConfig.containsKey("mysql.sql") || connectionConfig.getProperty("mysql.sql").isEmpty()){
      LOG.error("{}mysql.sql missed!",BOLT_PREFIX);
      throw new MissConfigException("mysql.sql missed");
    }else {
      sql = connectionConfig.getProperty("mysql.sql").trim();
    }
    return conn.prepareStatement(sql);
  }

  /**
   * Prepares a statement from the SQL configured under {@code <BOLT_PREFIX>mysql.sql2}.
   *
   * @param config      Storm's configuration map.
   * @param BOLT_PREFIX Configuration key prefix identifying the bolt.
   * @param conn        An open connection to prepare the statement on.
   * @return The prepared statement.
   * @throws MissConfigException When {@code mysql.sql2} is missing or empty.
   */
  public static PreparedStatement getPrepareStmt2(final Map<String, Object> config,
                                                 String BOLT_PREFIX, Connection conn)
    throws Exception {
    Properties connectionConfig = configFromPrefix(config, BOLT_PREFIX);
    String sql;
    if(!connectionConfig.containsKey("mysql.sql2") || connectionConfig.getProperty("mysql.sql2").isEmpty()){
      // original copy-pasted "mysql.password missed!" here; the missing key is mysql.sql2
      LOG.error("{}mysql.sql2 missed!",BOLT_PREFIX);
      throw new MissConfigException("mysql.sql2 missed");
    }else {
      sql = connectionConfig.getProperty("mysql.sql2").trim();
    }
    return conn.prepareStatement(sql);
  }

  /**
   * Binds an IN-clause of {@code inCount} placeholders: the first {@code lids.size()} positions
   * get the given ids, the remaining positions are padded with {@code -1} so the statement's
   * placeholder count always matches.
   *
   * @param pstmt   The statement whose placeholders to bind (1-based positions).
   * @param lids    The ids to bind.
   * @param inCount Total number of placeholders in the statement's IN clause.
   * @throws DBException When binding fails.
   */
  public static void constructSQL(PreparedStatement pstmt, List<Integer> lids, int inCount)
    throws Exception{
    try {

      int size = lids.size();
      int cc = 0;
      for (int i = 0, k = 1; i < size; i++, k++) {
        pstmt.setInt(k, lids.get(i));
        cc++;
      }
      // pad unused placeholders with a sentinel that matches no real id
      for(int i = size + 1; i <= inCount; i++){
        pstmt.setInt(i, -1);
        cc++;
      }
      LOG.info("constructSQL---cc:{},incount:{},size:{}", cc,inCount,size);
    }catch (Exception e){
      // log with the throwable argument (stack trace included); printStackTrace removed
      LOG.error("constructSQL error", e);
      throw new DBException("constructSQL error", e);
    }
  }
}

