/*
 * $Id: ShemRequestData.java,v 1.4 2009/10/27 23:06:34 afaichun Exp $
 * 
 * Copyright (C) 2008 General Electric Company. All Rights Reserved.
 * 
 * This software is the confidential and proprietary information of the General
 * Electric Company (GE). You shall not disclose this software and shall use it
 * only in accordance with the terms of the license agreement you entered into
 * with GE.
 * 
 * GE MAKES NO REPRESENTATIONS OR WARRANTIES ABOUT THE SUITABILITY OF THE
 * SOFTWARE, EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR
 * NON-INFRINGEMENT. GE SHALL NOT BE LIABLE FOR ANY DAMAGES SUFFERED BY LICENSEE
 * AS A RESULT OF USING, MODIFYING, OR DISTRIBUTING THIS SOFTWARE OR ITS
 * DERIVATIVES.
 */
package com.ge.healthcare.autosc.processor.shemjob;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.Vector;

import com.ge.healthcare.autosc.common.ASCLogger;
import com.ge.healthcare.autosc.common.ASCUtil;
import com.ge.healthcare.autosc.common.database.pojo.ActFileTransferDetails;
import com.ge.healthcare.autosc.common.dbLogger.StepLogger;
import com.ge.healthcare.autosc.common.exceptions.BasicException;
import com.ge.healthcare.autosc.common.exceptions.ZipperUtilException;
import com.ge.healthcare.autosc.common.requestData.GzippedRequestDataSet;
import com.ge.healthcare.autosc.common.requestData.GzippedTarRequestDataSet;
import com.ge.healthcare.autosc.common.requestData.RawRequestDataSet;
import com.ge.healthcare.autosc.common.requestData.RequestDataSet;
import com.ge.healthcare.autosc.common.util.ZipperUtil;
import com.ge.healthcare.autosc.common.util.tar.TarEntry;
import com.ge.healthcare.autosc.common.util.tar.TarInputStream;
/**
 * The class manages all SHEM data from the request. It sorts out data files
 * from the request based on their file type.
 * @author 212042946
 *
 */
public class ShemRequestData {

	// Data sets available for processing, keyed by data-set name.
	// NOTE(review): package-visible Hashtable kept as-is for binary/source
	// compatibility with any same-package callers.
	Hashtable<String, RequestDataSet> availableDataHash = new Hashtable<String, RequestDataSet>();
	// Files excluded from processing (archive container files whose contents
	// were expanded into individual data sets instead), keyed by file name.
	Hashtable<String, File> excludedFileHash = new Hashtable<String, File>();

	/**
	 * Constructor. Builds the set of available data sets from the transfer
	 * details. If an archive parameter file name is supplied, its contents
	 * are expanded into individual data sets and the archive file itself is
	 * excluded from processing.
	 *
	 * @param ftDetails list of files to be processed
	 * @param tdfFileName name of an archived file with data files in it,
	 *                    or {@code null} if there is no archive to expand
	 * @throws BasicException if the parameter file cannot be found in
	 *                        {@code ftDetails} or its archive cannot be read
	 */
	public ShemRequestData(List<ActFileTransferDetails> ftDetails, String tdfFileName)
		throws BasicException {
		if(tdfFileName != null) {
			processParamFile(ftDetails, tdfFileName);
		}
		generateDataSetHash(ftDetails);

		// Step logging: record what will and will not be processed.
		logKeys("Available data to process: ", availableDataHash.keySet());
		logKeys("Data that is excluded to process: ", excludedFileHash.keySet());
	}

	/**
	 * Writes the given key set to the step log as a single entry, each key
	 * wrapped in braces, prefixed by the supplied label.
	 *
	 * @param prefix label written before the keys
	 * @param keys key set to log
	 */
	private static void logKeys(String prefix, Set<String> keys) {
		// StringBuilder: local, single-threaded use needs no synchronization.
		StringBuilder data = new StringBuilder(prefix);
		for(String key: keys) {
			data.append("{").append(key).append("}");
		}
		StepLogger.addStepLog(data.toString());
	}

	/**
	 * Generate a list of the request's data sets. If the request's data
	 * is viewed as a set of files in a parameter directory, the list of
	 * data sets would be the file names of the files in the parameter
	 * directory.
	 * There is no guarantee that the name strings in the resulting list
	 * will appear in any specific order. They are not, in particular,
	 * guaranteed to appear in alphabetical order.
	 *
	 * @return A list of the request's data set names.
	 */
	public ArrayList<String> getDataSetList() {
		// Snapshot the key set; Hashtable's key set view is live otherwise.
		return new ArrayList<String>(availableDataHash.keySet());
	}

	/**
	 * Open, for reading, the specified data set from the request.
	 * It is the responsibility of the calling code to close the stream.
	 *
	 * @param dataSetName Name of the data set to open.
	 * @return Input stream to use for reading the data set.
	 * @throws BasicException If the data set is unknown or cannot be opened.
	 */
	public InputStream openDataSet(String dataSetName)
		throws BasicException {
		// Delegate the lookup (and the "not found" check) to getDataSet()
		// so the error handling lives in exactly one place.
		return getDataSet(dataSetName).openDataSet();
	}

	/**
	 * Return the data source object describing where the data set's
	 * content lives.
	 *
	 * @param dataSetName name of the data set to look up
	 * @return the matching request data set, never {@code null}
	 * @throws BasicException if no data set with that name exists
	 */
	public RequestDataSet getDataSet(String dataSetName)
		throws BasicException {
		RequestDataSet dataSetValue = this.availableDataHash.get(dataSetName);
		if(dataSetValue == null) {
			throw new BasicException("Data set " + dataSetName + " not found in request data");
		}
		return dataSetValue;
	}

	/**
	 * Expands the named archive parameter file into individual data sets.
	 * Tries gzipped-tar format first, then plain gzip.
	 *
	 * @param ftDetails transfer details in which the parameter file must appear
	 * @param tdfFileName name of the archive parameter file
	 * @throws BasicException if the file is not in {@code ftDetails} or unreadable
	 */
	private void processParamFile(List<ActFileTransferDetails> ftDetails, String tdfFileName)
		throws BasicException {
		ActFileTransferDetails paramFileDetail = getParamFileDetail(ftDetails, tdfFileName);
		if(paramFileDetail == null) {
			throw new BasicException("Cannot find File transfer details for param file " + tdfFileName);
		}

		File paramFile = new File(paramFileDetail.getFileDir(), tdfFileName);
		boolean isGzipTar = processParamFileAsZipTar(paramFileDetail.getFtId(), paramFile);
		if(!isGzipTar) {
			processParamFileAsZip(paramFile);
		}
	}

	/**
	 * If the parameter file is in gzipped-tar format, registers one data set
	 * per tar entry and excludes the archive file itself.
	 *
	 * @param ftId file-transfer id of the parameter file (currently unused here)
	 * @param paramFile the candidate archive file
	 * @return {@code true} if the file was recognized and processed as gzip+tar
	 * @throws BasicException if the archive content cannot be read
	 */
	private boolean processParamFileAsZipTar(Long ftId, File paramFile)
		throws BasicException {
		boolean processed = false;
		if(ZipperUtil.isTarZipFormat(paramFile)) {
			try {
				List<GzippedTarRequestDataSet> dataFiles = readRequestDataSetFromTarZip(paramFile);
				for(GzippedTarRequestDataSet requestDataSet: dataFiles) {
					addAvailableDataSet(requestDataSet);
				}
				// The archive itself must not be processed as a data file.
				excludedFileHash.put(paramFile.getName(), paramFile);
				processed = true;
			} catch (ZipperUtilException zue) {
				throw new BasicException("Error reading file content from zip file " + paramFile.getPath(), zue);
			}
		}
		return processed;
	}

	/**
	 * If the parameter file is in plain gzip format, registers it as a single
	 * gzipped data set and excludes the archive file itself.
	 *
	 * @param paramFile the candidate gzip file
	 * @return {@code true} if the file was recognized and processed as gzip
	 */
	private boolean processParamFileAsZip(File paramFile){
		boolean processed = false;
		if(ZipperUtil.isZipFormat(paramFile)) {
			RequestDataSet requestDataSet = new GzippedRequestDataSet(paramFile.getName(), paramFile);
			addAvailableDataSet(requestDataSet);
			// The archive itself must not be processed as a data file.
			excludedFileHash.put(paramFile.getName(), paramFile);
			processed = true;
		}
		return processed;
	}

	/**
	 * Registers a data set under its own name. A duplicate name is logged as
	 * an error (the newer entry wins), since it means two sources supplied
	 * a data set with the same name.
	 *
	 * @param requestDataSet data set to register
	 */
	private void addAvailableDataSet(RequestDataSet requestDataSet) {
		String key = requestDataSet.getName();
		// Hashtable.put returns the previous mapping; non-null means collision.
		if(availableDataHash.put(key, requestDataSet) != null)  {
			String msg = new StringBuilder()
				.append("Data set name ").append(key)
				.append(" is ambiguous.  Check the files in the ")
				.append("parameter file directory and the contents of ")
				.append("the files in the TDF name list.").toString();
			ASCLogger.error(this.getClass(), "addAvailableDataSet", msg, true);
		}
	}

	/**
	 * Finds the transfer-detail record whose file name matches the parameter
	 * file name (case-insensitive).
	 *
	 * @param ftDetails transfer details to search
	 * @param paramFileName file name to match
	 * @return the matching record, or {@code null} if none matches
	 */
	private ActFileTransferDetails getParamFileDetail(List<ActFileTransferDetails> ftDetails,
													  String paramFileName) {
		for(ActFileTransferDetails ftDetail: ftDetails) {
			if(ftDetail.getFileName().equalsIgnoreCase(paramFileName)) {
				return ftDetail;
			}
		}
		return null;
	}

	/**
	 * Registers every transferred file that was not excluded (i.e. is not an
	 * already-expanded archive) as a raw data set.
	 *
	 * @param ftDetails transfer details for the request; may be null or empty
	 */
	private void generateDataSetHash(List<ActFileTransferDetails> ftDetails) {
		if(!ASCUtil.isNullOrEmptyList(ftDetails)) {
			for(ActFileTransferDetails ftDetail: ftDetails) {
				if(!excludedFileHash.containsKey(ftDetail.getFileName())) {
					File dataFile = new File(ftDetail.getFileDir(), ftDetail.getFileName());
					addAvailableDataSet(new RawRequestDataSet(ftDetail.getFileName(), dataFile));
				}
			}
		}
	}

	/**
	 * Reads the entry list of a gzipped tar archive and builds one
	 * {@link GzippedTarRequestDataSet} per entry. Directory entries are
	 * flagged as such via the three-argument constructor.
	 *
	 * @param zipFile the gzipped tar file to read
	 * @return one data set per tar entry; empty if the file could not be
	 *         opened as a zip stream
	 * @throws BasicException if a tar entry cannot be read
	 */
	private List<GzippedTarRequestDataSet> readRequestDataSetFromTarZip(File zipFile)
		throws BasicException {
		List<GzippedTarRequestDataSet> gzippedTarDataSet = new ArrayList<GzippedTarRequestDataSet>();
		InputStream zipStream = ZipperUtil.openAsZip(zipFile);
		if(zipStream != null) {
			TarInputStream tis = null;
			try {
				tis = new TarInputStream(zipStream);
				TarEntry tarEntry = tis.getNextEntry();
				while(tarEntry != null) {
					String fileName = tarEntry.getName();
					if(tarEntry.isDirectory()) {
						gzippedTarDataSet.add(new GzippedTarRequestDataSet(fileName, zipFile, true));
					} else {
						gzippedTarDataSet.add(new GzippedTarRequestDataSet(fileName, zipFile));
					}
					tarEntry = tis.getNextEntry();
				}
			} catch (IOException ioe) {
				String errorStr = "Failed reading entry from tar content from zip stream";
				throw new BasicException(errorStr, ioe);
			} finally {
				if(tis != null) {
					// Closing the tar stream also closes the wrapped zip stream.
					ASCUtil.silentlyClose(tis);
				} else {
					// TarInputStream was never constructed; close the
					// underlying zip stream directly so it does not leak.
					ASCUtil.silentlyClose(zipStream);
				}
			}
		}
		return gzippedTarDataSet;
	}
}
