/**
 * Copyright (C) 2010, 2011 Neofonie GmbH
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package eu.dicodeproject.analysis.restapi.oozie;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URI;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Iterator;
import java.util.Properties;

import org.apache.commons.compress.archivers.ArchiveException;
import org.apache.commons.compress.archivers.ArchiveStreamFactory;
import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.oozie.client.OozieClient;
import org.apache.oozie.client.OozieClientException;
import org.apache.oozie.client.WorkflowJob;
import org.apache.oozie.client.WorkflowJob.Status;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;

@Component
public class TwitterVectorExporter {

  /** Logger for this class. */
  private static final Logger log = LoggerFactory.getLogger(TwitterVectorExporter.class);

  // cluster configuration (injected via setters)
  private String hadoopNamenode;
  private String hadoopJobtracker;
  private String oozieUrl;
  private String oozieQueue;

  // job configuration (injected via setters)
  private String oozieJobPath;
  private String oozieOutputDir;
  private String hadoopReducers;

  /** HDFS sub-directory (per run) holding the raw tweet export. */
  private static final String EXPORT_DIR = "/twitterexport";
  /** HDFS sub-directory (per run) holding the generated sparse vectors. */
  private static final String VECTOR_DIR = "/sparsevectors";

  // per-run state, (re)initialized by startJob(); getJobStatus()/copyFiles()
  // read these, so they must only be called after startJob()
  private String exportPath;
  private String vectorPath;
  private OozieClient oozieClient;

  /**
   * Submits the Oozie workflow that exports tweets and converts them into
   * sparse vectors.
   *
   * @param wfDefinition HDFS application path of the Oozie workflow definition
   * @param inputParams additional workflow parameters passed through from the
   *          REST service (e.g. the query "topic")
   * @param dateString timestamp string used to build unique per-run output
   *          directories under {@code oozieOutputDir}
   * @return the Oozie job id of the submitted workflow
   * @throws OozieClientException if the workflow cannot be submitted
   */
  public String startJob(String wfDefinition, Properties inputParams, String dateString) throws OozieClientException {

    this.oozieClient = new OozieClient(this.oozieUrl);
    this.exportPath = this.oozieOutputDir + "/" + dateString + EXPORT_DIR;
    this.vectorPath = this.oozieOutputDir + "/" + dateString + VECTOR_DIR;

    Properties conf = oozieClient.createConfiguration();
    conf.setProperty(OozieClient.APP_PATH, wfDefinition);

    // workflow parameters expected by the workflow definition
    conf.setProperty("jobTracker", this.hadoopJobtracker);
    conf.setProperty("nameNode", this.hadoopNamenode);
    conf.setProperty("queueName", this.oozieQueue);
    conf.setProperty("outputDir", this.oozieOutputDir);
    conf.setProperty("exportDir", this.exportPath);
    conf.setProperty("vectorOutputDir", this.vectorPath);
    conf.setProperty("numReducers", this.hadoopReducers);

    // add query parameters from the REST service
    conf.putAll(inputParams);
    return oozieClient.run(conf);
  }

  /**
   * Looks up the current status of a previously submitted workflow.
   * Must only be called after {@link #startJob}, which creates the client.
   *
   * @param jobID Oozie job id returned by {@link #startJob}
   * @return the workflow's current status
   * @throws OozieClientException if the job info cannot be retrieved
   */
  private Status getJobStatus(String jobID) throws OozieClientException {
    WorkflowJob job = oozieClient.getJobInfo(jobID);
    return job.getStatus();
  }

  /**
   * Runs the full export pipeline synchronously: submits the Oozie job, polls
   * until it leaves the RUNNING state, copies the vector output to the local
   * file system and packages it as a tar.gz archive.
   *
   * @param params workflow parameters; the "topic" property is used for
   *          logging and for naming the result archive
   * @return the tar.gz archive containing the sparse vectors and dictionary,
   *         or {@code null} if archiving failed (see {@link #zipResult})
   * @throws OozieClientException if submitting or polling the job fails
   * @throws InterruptedException if the polling sleep is interrupted
   * @throws IOException if copying files between HDFS and the local FS fails
   * @throws ArchiveException if the tar archive stream cannot be created
   */
  public File getTwitterVectors(Properties params) throws OozieClientException, InterruptedException, IOException,
      ArchiveException {

    long startTime = System.currentTimeMillis();
    // lower-case "mm" = minutes; "MM" (as used previously) is the month and
    // produced non-unique, misleading run directories
    String dateString = new SimpleDateFormat("yyyyMMdd_HH_mm_ss").format(new Date());
    String jobId = this.startJob(this.oozieJobPath, params, dateString);
    Status status = this.getJobStatus(jobId);

    // TODO: check if Oozie can tell us if the job is done
    if (status == Status.RUNNING) {
      log.info("Started Oozie Job {}", jobId);
    } else {
      log.error("Problem starting Oozie Job {}", jobId);
    }

    // busy-wait with a 5 s poll interval until the workflow leaves RUNNING
    while (this.getJobStatus(jobId) == Status.RUNNING) {
      Thread.sleep(5000);
    }
    long endTime = System.currentTimeMillis();
    log.info("Writing vectors for topic {} took {} seconds", params.getProperty("topic"),
        (endTime - startTime) / 1000);
    this.copyFiles();

    // strip non-word characters so the topic is safe as a file-name component
    String cleanTopic = params.getProperty("topic").replaceAll("\\W", "");

    File outputFile = new File(cleanTopic + dateString + ".tar.gz");
    File result = TwitterVectorExporter.zipResult(outputFile, new File("sparsevectors"));
    FileUtils.deleteDirectory(new File("sparsevectors"));
    return result;
  }

  /**
   * Copies the vector output from HDFS to the local "sparsevectors" directory
   * and cleans up the per-run HDFS directories.
   * TODO: use unique local filenames and postpone cleanup
   *
   * @throws IOException if any HDFS operation fails
   */
  private void copyFiles() throws IOException {

    FileSystem fs = FileSystem.get(URI.create(this.hadoopNamenode), new Configuration());
    // drop Hadoop's _logs directory so it does not end up in the archive
    fs.delete(new Path(this.vectorPath + "/tf-vectors/_logs"), true);
    fs.copyToLocalFile(new Path(this.vectorPath + "/tf-vectors"), new Path("sparsevectors"));
    fs.copyToLocalFile(new Path(this.vectorPath + "/dictionary.file-0"), new Path("sparsevectors"));
    // clean up HDFS
    fs.delete(new Path(this.vectorPath), true);
    fs.delete(new Path(this.exportPath), true);
  }

  /**
   * Packages the vector files and dictionary as a tar.gz archive. Files whose
   * names start with "_" or "." (Hadoop markers and checksums) are excluded.
   *
   * @param outputFile the archive file to create
   * @param inputDir directory whose files are added (recursively)
   * @return {@code outputFile} on success, {@code null} if an I/O error
   *         occurred (the error is logged)
   * @throws ArchiveException if the tar archive stream cannot be created
   */
  private static File zipResult(File outputFile, File inputDir) throws ArchiveException {

    // try-with-resources guarantees the streams are closed even when an
    // exception is thrown mid-archive (previously they leaked on error)
    try (FileOutputStream fos = new FileOutputStream(outputFile);
	GzipCompressorOutputStream gzos = new GzipCompressorOutputStream(fos);
	TarArchiveOutputStream tos = (TarArchiveOutputStream) new ArchiveStreamFactory()
	    .createArchiveOutputStream("tar", gzos)) {

      Iterator<File> it = FileUtils.iterateFiles(inputDir, null, true);

      while (it.hasNext()) {

	File file = it.next();

	if (!file.getName().startsWith("_") && !file.getName().startsWith(".")) {
	  TarArchiveEntry entry = new TarArchiveEntry(file);
	  entry.setSize(file.length());
	  tos.putArchiveEntry(entry);
	  if (file.isFile()) {
	    // close the per-file input stream (previously leaked)
	    try (FileInputStream fis = new FileInputStream(file)) {
	      org.apache.commons.compress.utils.IOUtils.copy(fis, tos);
	    }
	  }
	  tos.closeArchiveEntry();
	}
      }
      tos.finish();
      return outputFile;
    } catch (IOException e) {
      // FileNotFoundException is an IOException, so one catch covers both
      // previously duplicated handlers
      log.error(e.getMessage(), e);
    }
    return null;
  }

  /**
   * @param hadoopNamenode HDFS namenode URI, e.g. {@code hdfs://host:port}
   */
  public void setHadoopNamenode(String hadoopNamenode) {
    this.hadoopNamenode = hadoopNamenode;
  }

  /**
   * @param hadoopJobtracker jobtracker address passed to the workflow
   */
  public void setHadoopJobtracker(String hadoopJobtracker) {
    this.hadoopJobtracker = hadoopJobtracker;
  }

  /**
   * @param oozieUrl base URL of the Oozie server
   */
  public void setOozieUrl(String oozieUrl) {
    this.oozieUrl = oozieUrl;
  }

  /**
   * @param oozieQueue Hadoop queue name the workflow should run in
   */
  public void setOozieQueue(String oozieQueue) {
    this.oozieQueue = oozieQueue;
  }

  /**
   * @param oozieJobPath HDFS application path of the workflow definition
   */
  public void setOozieJobPath(String oozieJobPath) {
    this.oozieJobPath = oozieJobPath;
  }

  /**
   * @param oozieOutputDir HDFS base directory for per-run output
   */
  public void setOozieOutputDir(String oozieOutputDir) {
    this.oozieOutputDir = oozieOutputDir;
  }

  /**
   * @param hadoopReducers number of reducers, passed to the workflow as a
   *          string property
   */
  public void setHadoopReducers(String hadoopReducers) {
    this.hadoopReducers = hadoopReducers;
  }

}