/**
 * Copyright (C) 2010, 2011 Neofonie GmbH
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package eu.dicodeproject.blogimport.importer;

import java.io.IOException;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.NotServingRegionException;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.HTablePool;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RetriesExhaustedException;
import org.apache.hadoop.hbase.util.Bytes;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import eu.dicodeproject.blogimport.exceptions.DocumentStoreException;
import eu.dicodeproject.blogimport.exceptions.DocumentStoreNotAccessibleException;
import eu.dicodeproject.blogimport.parser.BlogDocument;
import eu.dicodeproject.blogimport.parser.ContentMapFields;

/**
 * Class that stores blog entries to HBase.
 *
 * <p>Not thread-safe: {@code idCounts} is an unsynchronized {@link HashMap} and
 * {@code counter} is an unguarded static long, so confine each instance to a
 * single writer thread or add synchronization before sharing.
 */
public class HBaseBlogSink {
  /** Logger for this class. */
  private static final Logger LOG = LoggerFactory.getLogger(HBaseBlogSink.class);

  /** Column family all blog entry columns are written to. */
  private byte[] blogFamily;

  /** Name of the HBase table blog entries are stored in. */
  private String blogTableName;

  /** Zookeeper quorum; defaults to localhost. */
  private String zookeeperQuorum = "localhost";
  /** Zookeeper client port; defaults to 2181. */
  private int zookeeperPort = 2181;
  /** HBase configuration; built lazily in {@link #init()} unless injected. */
  private Configuration configuration;

  /** Pool of table handles, created in {@link #init()}. */
  private HTablePool hTablePool;

  /** Number of documents stored since JVM start (shared across instances). */
  private static long counter;

  /** Row key -> number of times seen, used for duplicate reporting. */
  private Map<String, Integer> idCounts = new HashMap<String, Integer>();

  /**
   * If a {@link Configuration} was set, that instance will be used. Otherwise
   * {@link #init()} creates a new one using the values given by
   * {@link #setZookeeperPort(int)} and {@link #setZookeeperQuorum(String)}.
   * Ensures the target table exists and initializes the table pool.
   *
   * @throws IOException
   *           if the HBase master cannot be reached or table creation fails
   */
  @PostConstruct
  public void init() throws IOException {
    if (this.configuration == null) {
      this.configuration = new Configuration();
      this.configuration.clear();
      this.configuration.set("hbase.zookeeper.quorum", this.zookeeperQuorum);
      this.configuration.setInt("hbase.zookeeper.property.clientPort", this.zookeeperPort);
    }
    HBaseAdmin admin = new HBaseAdmin(this.configuration);
    try {
      initTable(this.blogTableName, admin, this.blogFamily);
    } finally {
      // FIX: the admin connection was previously leaked.
      admin.close();
    }
    this.hTablePool = new HTablePool(this.configuration, 100);
  }

  /** Logs the document count and releases the table pool. */
  @PreDestroy
  public void shutdown() {
    // FIX: original message lacked spacing ("...1234Documents stored").
    LOG.info("Shutting down blog Sink: " + counter + " documents stored");
    try {
      this.hTablePool.close();
    } catch (IOException e) {
      // FIX: pass the throwable instead of concatenating getMessage().
      LOG.error("Error closing table pool", e);
    }
  }

  /**
   * Creates the table with the given column families unless it already exists.
   *
   * @param tablename table to check/create
   * @param admin admin handle used for the DDL operations
   * @param families column families the new table should contain
   * @throws IOException on communication or creation failure
   */
  private void initTable(String tablename, HBaseAdmin admin, byte[]... families) throws IOException {
    if (!this.checkTable(tablename, admin)) {
      HTableDescriptor desc = new HTableDescriptor(tablename);
      for (byte[] family : families) {
        HColumnDescriptor colFam = new HColumnDescriptor(family);
        desc.addFamily(colFam);

        if (LOG.isInfoEnabled()) {
          StringBuilder message = new StringBuilder("Creating table ");
          message.append(tablename);
          // FIX: typo "non were found" -> "none were found".
          message.append(" with column family " + Bytes.toString(family) + " as none were found.");
          LOG.info(message.toString());
        }
      }
      admin.createTable(desc);
    }
  }

  /**
   * @return true if a table with the given name already exists.
   */
  private boolean checkTable(final String tablename, final HBaseAdmin admin) throws IOException {
    HTableDescriptor[] descs = admin.listTables();
    for (HTableDescriptor desc : descs) {
      if (tablename.equals(desc.getNameAsString())) {
        return true;
      }
    }
    return false;
  }

  /** Adds a string column to the put; silently skipped when the value is null. */
  private void add(final byte[] family, final byte[] col, final String value, Put targetPut) {
    if (value != null) {
      targetPut.add(family, col, Bytes.toBytes(value));
    }
  }

  /** Adds a long column to the put. */
  private void add(final byte[] family, final byte[] col, final long value, Put targetPut) {
    targetPut.add(family, col, Bytes.toBytes(value));
  }

  /**
   * Stores a single blog entry by delegating to {@link #store(List)}.
   *
   * @param entry the blog entry to store
   * @throws DocumentStoreNotAccessibleException
   * @throws DocumentStoreException
   */
  public void store(final BlogDocument entry) throws DocumentStoreNotAccessibleException,
      DocumentStoreException {
    List<BlogDocument> entryList = new ArrayList<BlogDocument>();
    entryList.add(entry);
    this.store(entryList);
  }

  /**
   * Stores a batch of blog entries into HBase. Entries that fail during
   * conversion are logged and skipped so the rest of the batch still gets
   * written.
   *
   * @param entries the blog entries to store
   * @throws DocumentStoreNotAccessibleException if the store is unreachable
   * @throws DocumentStoreException if writing fails even after retries
   */
  public void store(final List<BlogDocument> entries) throws DocumentStoreNotAccessibleException,
      DocumentStoreException {

    HTableInterface table = this.hTablePool.getTable(this.blogTableName);
    List<Put> puts = new ArrayList<Put>();

    for (BlogDocument entry : entries) {
      try {
        String key = createCompoundKey(entry);

        if (LOG.isInfoEnabled()) {
          int docCount = 1;

          if (this.idCounts.get(key) != null) {
            docCount = this.idCounts.get(key) + 1;
          }
          idCounts.put(key, docCount);
        }

        // FIX: use Bytes.toBytes (UTF-8) instead of platform-default
        // String.getBytes(), matching the Bytes.toString used on reads.
        Put put = new Put(Bytes.toBytes(key));
        add(blogFamily, ContentMapFields.BLOGTITLE.bytes(), entry.getBlogtitle(), put);
        add(blogFamily, ContentMapFields.LANGUAGE.bytes(), entry.getLanguage(), put);
        add(blogFamily, ContentMapFields.TITLE.bytes(), entry.getTitle(), put);
        add(blogFamily, ContentMapFields.URL.bytes(), entry.getUrl(), put);
        add(blogFamily, ContentMapFields.PUBLISHTIMESTAMP.bytes(), entry.getPublishTimestamp(), put);
        add(blogFamily, ContentMapFields.CATEGORIES.bytes(), entry.getCategories(), put);
        add(blogFamily, ContentMapFields.BODY.bytes(), entry.getBody(), put);
        add(blogFamily, ContentMapFields.AUTHOR.bytes(), entry.getAuthor(), put);
        add(blogFamily, ContentMapFields.TAGS.bytes(), entry.getTags(), put);
        add(blogFamily, ContentMapFields.AUTOTAGS.bytes(), entry.getAutotags(), put);

        // FIX: removed the always-true "if (put != null)" check.
        puts.add(put);

        // We want to track the number of stored blog entries since restart
        if (++counter % 1000 == 0) {
          LOG.info(printIDCounts());
        }
        LOG.debug("Storing document " + counter + ": " + entry.getUrl());

      } catch (Exception e) {
        // we expect there to be parsing trouble and do not want to throw away
        // the whole batch of blog entries in that case.
        LOG.warn(
            "Error processing blog entry: "
                + e.getMessage()
                + " (Ignoring and continuing processing - be alarmed if that happens often for your desired definition of often.) ",
            e);
        // FIX: typo "let to" -> "led to".
        LOG.warn("Entry that led to the exception: " + entry.getUrl());
      }
    }
    try {
      // FIX: guard on puts, not entries. Previously a batch whose only entry
      // failed conversion hit puts.get(0) on an empty list.
      if (!puts.isEmpty()) {
        writePuts(table, puts);
      }
    } catch (RetriesExhaustedException e) {
      LOG.warn("There seems to be a problem with hbase, retrying write.", e);
      if (!retryWrite(table, puts, 3)) {
        throw new DocumentStoreException("Writing to hbase failed after 3 retries, giving up.", e);
      }
    } catch (NotServingRegionException e) {
      LOG.warn("There seems to be a problem with hbase cluster. Possibly the cluster is down, retrying.", e);
      // FIX: the original retried forever here, hanging the caller if the
      // cluster never came back. Bound the retries and fail loudly instead.
      if (!retryWrite(table, puts, 3)) {
        throw new DocumentStoreException("Writing to hbase failed after 3 retries, giving up.", e);
      }
    } catch (Exception e) {
      throw new DocumentStoreException(e);
    } finally {
      try {
        table.close();
      } catch (IOException e) {
        LOG.error("Error closing table", e);
      }
    }
  }

  /** Writes the puts, as a single put or a batch depending on size. */
  private void writePuts(final HTableInterface table, final List<Put> puts) throws IOException {
    if (puts.size() == 1) {
      table.put(puts.get(0));
      LOG.debug("Adding single put");
    } else {
      // Puts some data in the table, in batch.
      table.put(puts);
      LOG.debug("Added batch of size " + puts.size());
    }
  }

  /**
   * Retries {@link #writePuts} up to maxAttempts times, logging each failure.
   *
   * @return true as soon as one attempt succeeds, false when all attempts fail
   */
  private boolean retryWrite(final HTableInterface table, final List<Put> puts, final int maxAttempts) {
    for (int attempt = 1; attempt <= maxAttempts; attempt++) {
      try {
        writePuts(table, puts);
        return true;
      } catch (Exception e) {
        LOG.warn("Retry " + attempt + " failed.", e);
      }
    }
    return false;
  }

  /**
   * Creates a key consisting of date string and url. If no publish date is
   * available, we use the current date.
   *
   * @param entry the document to build the row key for
   * @return a string consisting of date and url, joined by an underscore
   */
  public static String createCompoundKey(BlogDocument entry) {
    // SimpleDateFormat is not thread-safe; a fresh local instance is
    // deliberately created per call instead of a shared static.
    DateFormat dateFormat = new SimpleDateFormat("yyyyMMddHHmm");
    String keyString = "";

    if (entry.getPublishTimestamp() > 0) {
      keyString += dateFormat.format(new Date(entry.getPublishTimestamp()));
    } else {
      keyString += dateFormat.format(new Date());
      LOG.info("No Publish timestamp available - using current date: " + entry.getUrl());
    }
    keyString += "_" + entry.getUrl();
    return keyString;
  }

  /**
   * @param blogFamily
   *          the blogFamily to set
   */
  public void setBlogFamily(String blogFamily) {
    // FIX: encode via Bytes.toBytes (UTF-8) instead of the platform charset,
    // matching how the family name is decoded elsewhere in this class.
    this.blogFamily = Bytes.toBytes(blogFamily);
  }

  /**
   * @param blogTableName
   *          the blogTableName to set (surrounding whitespace is stripped)
   */
  public void setBlogTableName(String blogTableName) {
    this.blogTableName = blogTableName.trim();
  }

  /**
   * @param zookeeperQuorum
   *          the zookeeperQuorum to set
   */
  public void setZookeeperQuorum(String zookeeperQuorum) {
    this.zookeeperQuorum = zookeeperQuorum;
  }

  /**
   * @param zookeeperPort
   *          the zookeeperPort to set
   */
  public void setZookeeperPort(int zookeeperPort) {
    this.zookeeperPort = zookeeperPort;
  }

  /**
   * @param configuration
   *          the configuration to set
   */
  void setHBaseConfiguration(Configuration configuration) {
    this.configuration = configuration;
  }

  /**
   * Debug output: duplicate entries (url and count).
   *
   * @return aggregated duplicate statistics plus a line per key that appeared
   *         more than the threshold number of times
   */
  protected String printIDCounts() {
    StringBuilder output = new StringBuilder("");
    int total = 0;
    int duplicates = 0;
    int threshold = 20;

    // Iterate entries directly instead of keySet + get lookups.
    for (Map.Entry<String, Integer> idCount : this.idCounts.entrySet()) {
      int count = idCount.getValue();
      total += count;
      if (count > 1) {
        duplicates += count - 1;
        if (count > threshold) {
          output.append("Duplicate: " + idCount.getKey() + " appeared " + count + " times\n");
        }
      }
    }
    // FIX: the original summary labelled the duplicate count as
    // "total documents processed" and claimed the listing threshold was
    // ">= threshold" while the check above is strictly ">".
    String summary = "Aggregated duplicate statistics\n" + duplicates + " duplicates ("
        + this.idCounts.size() + " unique documents stored) [total: " + total
        + "]\nThe following documents appeared more than " + threshold + " times\n";
    return summary + output.toString();
  }
}
