/*
 *  Copyright 2013 National Institute of Advanced Industrial Science and Technology
 *  
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *  
 *      http://www.apache.org/licenses/LICENSE-2.0
 *  
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 */

package org.sss.server;

import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.io.Serializable;
import java.lang.reflect.InvocationTargetException;
import java.util.Properties;

import org.slf4j.Logger;
import org.sss.mapreduce.DBFactory;
import org.sss.mapreduce.IOConfiguration;
import org.sss.mapreduce.Resources;

/**
 * Server-side configuration for the SSS MapReduce server.
 *
 * <p>Values are populated by {@link #readFile()} from two sources, in order:
 * <ol>
 *   <li>the bundled classpath resource
 *       {@code /conf/mapreduce.server-default.properties}, then</li>
 *   <li>an optional site file {@code mapreduce.server.properties} located in
 *       the directory returned by {@code Resources.getConfigDirectory()}.</li>
 * </ol>
 * Each property is looked up under the {@code mapreduce.server.} key prefix.
 * Loading is best-effort: a missing or unreadable file leaves the hard-coded
 * field defaults below in place.
 */
@SuppressWarnings("serial")
public class SssServerConfiguration implements Serializable {
  private static final Logger logger = SssServer.getLogger(SssServerConfiguration.class);

  /** Per-process I/O settings; its {@code processID} doubles as the server id. */
  public final IOConfiguration ioConf = new IOConfiguration();
  public int input_threads_max = 16;
  public int output_threads_max = 16;
  public int wk_threads_max = 16;
  public int inputtp_workqueue_limit = 16;
  public int outputtp_workqueue_limit = 16;
  public int wktp_workqueue_limit = 16;
  public int map_output_queue_multiplicity = 2;
  public int map_output_queue_limit_nbytes = 2097152;
  public int reduce_output_queue_multiplicity = 2;
  public int reduce_output_queue_limit_nbytes = 2097152;
  public int combine_queue_multiplicity = 2;
  public int combine_queue_limit_nbytes = 2097152;
  public int map_output_cache_nbytes = 128 * 1024;
  public int reduce_output_cache_nbytes = 128 * 1024;

  public SssServerConfiguration () {
  }

  /** Sets the server (process) id stored in {@link #ioConf}. */
  public void setServerId(int id) {
    this.ioConf.processID = id;
  }

  /** Returns the server (process) id stored in {@link #ioConf}. */
  public int getServerId() {
    return this.ioConf.processID;
  }

  /**
   * Builds a configuration from the default and optional site property files,
   * validates it, and logs every effective value.
   *
   * @return a fully-populated configuration object, never {@code null}
   */
  public static SssServerConfiguration readFile() {
    SssServerConfiguration sssConf = new SssServerConfiguration();
    Properties props = new Properties();

    //
    // read-in default server properties (best-effort; failures are logged,
    // the hard-coded field defaults then remain in effect)
    //
    InputStream is = SssServerConfiguration.class.getResourceAsStream("/conf/mapreduce.server-default.properties");
    if (is == null) {
      logger.warn("default server properties resource not found on classpath.");
    }
    else {
      try {
        props.load(is);
      }
      catch (Exception e) {
        logger.warn("failed to load default server properties.", e);
      }
      finally {
        closeQuietly(is);
      }
    }

    //
    // read-in optional server properties in the conf directory (the site
    // file is optional, so its absence is expected and not an error)
    //
    InputStream fis = null;
    try {
      File pfile = new File(Resources.getConfigDirectory(), "mapreduce.server.properties");
      fis = new BufferedInputStream(new FileInputStream(pfile));
      props.load(fis);
    }
    catch (Exception e) {
      logger.debug("optional server properties not loaded: {}", e.toString());
    }
    finally {
      closeQuietly(fis);
    }

    sssConf.ioConf.stream_protocol       = parse(props, "io.stream.protocol",       sssConf.ioConf.stream_protocol);
    sssConf.ioConf.get_batchcount        = parse(props, "io.get.batchcount",        sssConf.ioConf.get_batchcount);
    sssConf.ioConf.get_limit_nbytes      = parse(props, "io.get.limit_nbytes",      sssConf.ioConf.get_limit_nbytes);
    sssConf.ioConf.put_batchcount        = parse(props, "io.put.batchcount",        sssConf.ioConf.put_batchcount);
    sssConf.input_threads_max        = parse(props, "input.threads.max",        sssConf.input_threads_max);
    sssConf.output_threads_max       = parse(props, "output.threads.max",       sssConf.output_threads_max);
    sssConf.wk_threads_max           = parse(props, "worker.threads.max",       sssConf.wk_threads_max);
    sssConf.inputtp_workqueue_limit  = parse(props, "inputtp.workqueue.limit",  sssConf.inputtp_workqueue_limit);
    sssConf.outputtp_workqueue_limit = parse(props, "outputtp.workqueue.limit", sssConf.outputtp_workqueue_limit);
    sssConf.wktp_workqueue_limit     = parse(props, "wktp.workqueue.limit",     sssConf.wktp_workqueue_limit);
    sssConf.map_output_cache_nbytes          = parse(props, "map.output.cache.nbytes",          sssConf.map_output_cache_nbytes);
    sssConf.reduce_output_cache_nbytes       = parse(props, "reduce.output.cache.nbytes",       sssConf.reduce_output_cache_nbytes);
    sssConf.map_output_queue_multiplicity    = parse(props, "map.output_queue.multiplicity",    sssConf.map_output_queue_multiplicity);
    sssConf.map_output_queue_limit_nbytes    = parse(props, "map.output_queue.limit_nbytes",    sssConf.map_output_queue_limit_nbytes);
    sssConf.reduce_output_queue_multiplicity = parse(props, "reduce.output_queue.multiplicity", sssConf.reduce_output_queue_multiplicity);
    sssConf.reduce_output_queue_limit_nbytes = parse(props, "reduce.output_queue.limit_nbytes", sssConf.reduce_output_queue_limit_nbytes);
    sssConf.combine_queue_multiplicity       = parse(props, "combine.queue.multiplicity",       sssConf.combine_queue_multiplicity);
    sssConf.combine_queue_limit_nbytes       = parse(props, "combine.queue.limit_nbytes",       sssConf.combine_queue_limit_nbytes);

    sssConf.ioConf.db_factory = (DBFactory)SssServerConfiguration.parseClass(props, "db.factory.class", sssConf.ioConf.db_factory);
    sssConf.ioConf.db_use_vanilla = parse(props, "db.use.vanilla", sssConf.ioConf.db_use_vanilla);

    // clamp to the minimum the input thread pool requires.
    if (sssConf.inputtp_workqueue_limit < 16) {
      logger.warn("inputtp.workqueue.limit must be 16 or larger.({})", sssConf.inputtp_workqueue_limit);
      sssConf.inputtp_workqueue_limit = 16;
    }

    logger.info("set up SssServerConfiguration object...");
    logger.info(" .io_stream_protocol  = {}", sssConf.ioConf.stream_protocol);
    logger.info(" .io_get_batchcount   = {}", sssConf.ioConf.get_batchcount);
    logger.info(" .io_get_limit_nbytes = {}", sssConf.ioConf.get_limit_nbytes);
    logger.info(" .io_put_batchcount   = {}", sssConf.ioConf.put_batchcount);
    logger.info(" .input_threads_max   = {}", sssConf.input_threads_max);
    logger.info(" .output_threads_max  = {}", sssConf.output_threads_max);
    logger.info(" .wk_threads_max      = {}", sssConf.wk_threads_max);
    logger.info(" .inputtp_workqueue_limit  = {}", sssConf.inputtp_workqueue_limit);
    logger.info(" .outputtp_workqueue_limit = {}", sssConf.outputtp_workqueue_limit);
    logger.info(" .wktp_workqueue_limit = {}", sssConf.wktp_workqueue_limit);
    logger.info(" .map_output_cache_nbytes = {}", sssConf.map_output_cache_nbytes);
    logger.info(" .reduce_output_cache_nbytes = {}", sssConf.reduce_output_cache_nbytes);
    logger.info(" .map_output_queue_multiplicity = {}",    sssConf.map_output_queue_multiplicity);
    logger.info(" .map_output_queue_limit_nbytes = {}",    sssConf.map_output_queue_limit_nbytes);
    logger.info(" .reduce_output_queue_multiplicity = {}", sssConf.reduce_output_queue_multiplicity);
    logger.info(" .reduce_output_queue_limit_nbytes = {}", sssConf.reduce_output_queue_limit_nbytes);
    logger.info(" .combine_queue_multiplicity = {}",       sssConf.combine_queue_multiplicity);
    logger.info(" .combine_queue_limit_nbytes = {}",       sssConf.combine_queue_limit_nbytes);

    // guard against an NPE when no DBFactory is configured at all.
    if (sssConf.ioConf.db_factory == null) {
      logger.warn(" .db_factory = null (no DBFactory configured)");
    }
    else {
      logger.info(" .db_factory = {}", sssConf.ioConf.db_factory.getClass().getName());
    }
    logger.info(" .db_use_vanilla = {}", sssConf.ioConf.db_use_vanilla);

    return sssConf;
  }

  /** Closes {@code is}, tolerating {@code null} and close failures. */
  private static void closeQuietly(InputStream is) {
    if (is != null) {
      try {
        is.close();
      }
      catch (Exception ignored) {
        // nothing useful to do about a close failure during configuration load.
      }
    }
  }

  /**
   * Reads a boolean property under the {@code mapreduce.server.} prefix.
   *
   * @param defval returned when the property is absent or empty
   */
  private static boolean parse(Properties props, String key, boolean defval) {
    String property = props.getProperty("mapreduce.server." + key);
    if (property == null || property.length() == 0) {
      return defval;
    }
    else {
      return Boolean.parseBoolean(property);
    }
  }

  /**
   * Reads an int property under the {@code mapreduce.server.} prefix.
   *
   * @param defval returned when the property is absent or empty
   * @throws NumberFormatException if the value is present but not an int
   */
  private static int parse(Properties props, String key, int defval) {
    String property = props.getProperty("mapreduce.server." + key);
    if (property == null || property.length() == 0) {
      return defval;
    }
    else {
      return Integer.parseInt(property);
    }
  }

  /**
   * Reads a long property under the {@code mapreduce.server.} prefix.
   *
   * @param defval returned when the property is absent or empty
   * @throws NumberFormatException if the value is present but not a long
   */
  @SuppressWarnings("unused")
  private static long parse(Properties props, String key, long defval) {
    String property = props.getProperty("mapreduce.server." + key);
    if (property == null || property.length() == 0) {
      return defval;
    }
    else {
      return Long.parseLong(property);
    }
  }

  /**
   * Reads a String property under the {@code mapreduce.server.} prefix.
   *
   * @param defval returned when the property is absent or empty
   */
  @SuppressWarnings("unused")
  private static String parse(Properties props, String key, String defval) {
    String property = props.getProperty("mapreduce.server." + key);
    if (property == null || property.length() == 0) {
      return defval;
    }
    else {
      return property;
    }
  }

  /**
   * Instantiates the class named by a property under the
   * {@code mapreduce.server.} prefix, using its public no-arg constructor.
   * Any reflection failure is logged and {@code defval} is returned instead.
   *
   * @param defval returned when the property is absent, empty, or unusable
   */
  private static Object parseClass(Properties props, String key, Object defval) {
    String property = props.getProperty("mapreduce.server." + key);
    if (property == null || property.length() == 0) {
      return defval;
    }
    else {
      try {
        Class<?> clazz = Class.forName(property);
        // getDeclaredConstructor().newInstance() instead of the deprecated
        // Class.newInstance(): the latter silently propagates any checked
        // exception thrown by the constructor.
        return clazz.getDeclaredConstructor().newInstance();
      } catch (ClassNotFoundException e) {
        logger.error("Failed to get Class for " + property, e);
      } catch (NoSuchMethodException e) {
        logger.error("No default constructor for class " + property, e);
      } catch (InstantiationException e) {
        logger.error("Failed to get instance for class " + property, e);
      } catch (IllegalAccessException e) {
        logger.error("Failed to get instance for class " + property, e);
      } catch (InvocationTargetException e) {
        logger.error("Constructor threw for class " + property, e);
      }
    }
    return defval;
  }

}
